robin / gate_OutEffHop_opt-125m / all_results.json @ c2d80ca
{"perplexity": 15.706686202168713, "model.decoder.final_layer_norm": 90.96276274729331, "model.decoder.layers.0": 0.4204075758269325, "model.decoder.layers.1": 0.4908598484700949, "model.decoder.layers.2": 0.5028251055120726, "model.decoder.layers.3": 0.691462532509645, "model.decoder.layers.4": 0.8262411430478096, "model.decoder.layers.5": 1.0756556153220116, "model.decoder.layers.6": 1.366795804172793, "model.decoder.layers.7": 1.5691746961852069, "model.decoder.layers.8": 2.1472622930385104, "model.decoder.layers.9": 3.6863469854770314, "model.decoder.layers.10": 6.168255855968336, "model.decoder.layers.11": 7.57078872866513, "model.decoder.layers.0.fc2": 0.1039871887996039, "model.decoder.layers.1.fc2": 0.05608035337503627, "model.decoder.layers.2.fc2": 0.03349900133207189, "model.decoder.layers.3.fc2": 0.06225394718614727, "model.decoder.layers.4.fc2": 0.06453566397655025, "model.decoder.layers.5.fc2": 0.07603821023602894, "model.decoder.layers.6.fc2": 0.11477861616001905, "model.decoder.layers.7.fc2": 0.16301063338992816, "model.decoder.layers.8.fc2": 0.2619496168986678, "model.decoder.layers.9.fc2": 0.414940554808086, "model.decoder.layers.10.fc2": 0.4336998329416942, "model.decoder.layers.11.fc2": 0.28299739546155805, "model.decoder.layers.0.final_layer_norm": 0.45984292787960546, "model.decoder.layers.1.final_layer_norm": 0.4209096071819691, "model.decoder.layers.2.final_layer_norm": 0.5994071354647184, "model.decoder.layers.3.final_layer_norm": 0.46284601323922103, "model.decoder.layers.4.final_layer_norm": 0.5087763514933797, "model.decoder.layers.5.final_layer_norm": 0.5348684589040256, "model.decoder.layers.6.final_layer_norm": 0.5873251914226254, "model.decoder.layers.7.final_layer_norm": 0.655958993094573, "model.decoder.layers.8.final_layer_norm": 0.814051132275788, "model.decoder.layers.9.final_layer_norm": 0.9984980142119612, "model.decoder.layers.10.final_layer_norm": 1.5374041555680895, "model.decoder.layers.11.final_layer_norm": 1.5156749826808207, "model.decoder.layers.0.self_attn.out_proj": 0.2508772022529335, "model.decoder.layers.1.self_attn.out_proj": 0.24341993913267063, "model.decoder.layers.2.self_attn.out_proj": 0.07271888871446504, "model.decoder.layers.3.self_attn.out_proj": 0.1664157649731667, "model.decoder.layers.4.self_attn.out_proj": 0.1435992467264257, "model.decoder.layers.5.self_attn.out_proj": 0.12288297165847165, "model.decoder.layers.6.self_attn.out_proj": 0.20423455739299612, "model.decoder.layers.7.self_attn.out_proj": 0.2692112508296348, "model.decoder.layers.8.self_attn.out_proj": 0.4702520902364016, "model.decoder.layers.9.self_attn.out_proj": 0.7821362368018402, "model.decoder.layers.10.self_attn.out_proj": 1.6762599524511592, "model.decoder.layers.11.self_attn.out_proj": 2.955877756794139, "model.decoder.layers.0.self_attn_layer_norm": 4.502585898500163, "model.decoder.layers.1.self_attn_layer_norm": 4.977919626135461, "model.decoder.layers.2.self_attn_layer_norm": 6.711245270361387, "model.decoder.layers.3.self_attn_layer_norm": 9.585455415352463, "model.decoder.layers.4.self_attn_layer_norm": 7.619464027974392, "model.decoder.layers.5.self_attn_layer_norm": 6.711193338558368, "model.decoder.layers.6.self_attn_layer_norm": 7.448162097836592, "model.decoder.layers.7.self_attn_layer_norm": 6.470245250931973, "model.decoder.layers.8.self_attn_layer_norm": 5.9982524145438045, "model.decoder.layers.9.self_attn_layer_norm": 5.239535203123835, "model.decoder.layers.10.self_attn_layer_norm": 5.41346654508519, 
"model.decoder.layers.11.self_attn_layer_norm": 6.829138810687183, "max_inf_norm": 90.96276274729331, "max_ffn_inf_norm": 0.4336998329416942, "max_layer_inf_norm": 7.57078872866513, "avg_kurtosis": 11.205696386600962, "max_kurtosis": 111.01246542132782, "max_kurtosis_layers": 31.2915544352952}