datasetId | author | last_modified | downloads | likes | tags | task_categories | createdAt | card
---|---|---|---|---|---|---|---|---|
ganga4364/benchmark_AB | ganga4364 | "2024-11-19T10:29:33Z" | 9 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T10:29:27Z" | ---
dataset_info:
features:
- name: file_name
dtype: string
- name: uni
dtype: string
- name: wylie
dtype: string
- name: url
dtype: string
- name: dept
dtype: string
- name: grade
dtype: int64
- name: char_len
dtype: int64
- name: audio_len
dtype: float64
- name: Original ID
dtype: string
- name: Collection
dtype: string
- name: Book Title
dtype: string
- name: Category
dtype: string
- name: Recorder
dtype: string
- name: Music
dtype: bool
- name: Name
dtype: string
- name: Gender
dtype: string
- name: Age
dtype: float64
- name: place
dtype: string
- name: livelihood
dtype: string
- name: birh place
dtype: string
- name: social/marital status
dtype: string
- name: education level
dtype: string
splits:
- name: Name
num_bytes: 1204827
num_examples: 1607
- name: Gender
num_bytes: 1353162
num_examples: 2000
- name: Age
num_bytes: 1139667
num_examples: 1680
- name: place
num_bytes: 1245958
num_examples: 1809
- name: Music
num_bytes: 1343345
num_examples: 2000
download_size: 1968630
dataset_size: 6286959
configs:
- config_name: default
data_files:
- split: Name
path: data/Name-*
- split: Gender
path: data/Gender-*
- split: Age
path: data/Age-*
- split: place
path: data/place-*
- split: Music
path: data/Music-*
---
|
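Each `dataset_info` block like the one above can be consumed directly with the `datasets` library; the entries under `splits` are the split names that `load_dataset` accepts. A minimal sketch for the card above, assuming the repository is still publicly available and `datasets` is installed:

```python
from datasets import load_dataset

# Load only the "Gender" benchmark split of ganga4364/benchmark_AB;
# the other splits listed in the card (Name, Age, place, Music) work the same way.
ds = load_dataset("ganga4364/benchmark_AB", split="Gender")

print(ds.num_rows)       # 2000 examples according to the card metadata
print(ds.column_names)   # file_name, uni, wylie, url, dept, grade, ...
```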
davidberenstein1957/img_prefs_style | davidberenstein1957 | "2024-11-19T11:01:53Z" | 9 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T11:01:51Z" | ---
dataset_info:
features:
- name: quality_prompt
dtype: string
- name: category
dtype: string
- name: subcategory
dtype: string
- name: style_prompt
dtype: string
- name: simplified_prompt
dtype: string
- name: image_quality_dev
struct:
- name: path
dtype: string
- name: grouped_model_name
sequence: string
- name: prompt
dtype: string
- name: image_simplified_dev
struct:
- name: path
dtype: string
- name: image_quality_sd
struct:
- name: path
dtype: string
- name: image_simplified_sd
struct:
- name: path
dtype: string
- name: distilabel_metadata
struct:
- name: raw_input_image_generation_0
struct:
- name: prompt
dtype: string
- name: raw_input_image_generation_1
struct:
- name: prompt
dtype: string
- name: raw_input_image_generation_2
struct:
- name: prompt
dtype: string
- name: raw_input_image_generation_3
struct:
- name: prompt
dtype: string
- name: raw_output_image_generation_0
struct:
- name: image
dtype: string
- name: raw_output_image_generation_1
struct:
- name: image
dtype: string
- name: raw_output_image_generation_2
struct:
- name: image
dtype: string
- name: raw_output_image_generation_3
struct:
- name: image
dtype: string
splits:
- name: train
num_bytes: 454895
num_examples: 1
download_size: 476119
dataset_size: 454895
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
Lakshay1Dagar/marketing_prompts_v2 | Lakshay1Dagar | "2024-11-19T11:34:11Z" | 9 | 0 | [
"license:apache-2.0",
"size_categories:n<1K",
"format:json",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T11:33:58Z" | ---
license: apache-2.0
---
|
ipeteclarke/simplecsv | ipeteclarke | "2024-11-19T11:41:41Z" | 9 | 0 | [
"license:apache-2.0",
"size_categories:n<1K",
"format:csv",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T11:40:32Z" | ---
license: apache-2.0
---
|
QuanHoangNgoc/palids-normal-expension-Runtest | QuanHoangNgoc | "2024-11-19T14:26:41Z" | 9 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T14:25:47Z" | ---
dataset_info:
features:
- name: answer
dtype: string
- name: cluster
dtype: int32
- name: image
dtype: image
- name: prompt
dtype: string
- name: description
dtype: string
splits:
- name: li_train
num_bytes: 901224571.5
num_examples: 5820
- name: li_test
num_bytes: 355286984.125
num_examples: 2495
download_size: 1239511186
dataset_size: 1256511555.625
configs:
- config_name: default
data_files:
- split: li_train
path: data/li_train-*
- split: li_test
path: data/li_test-*
---
|
Cywilke/KUNMBot-nazwy | Cywilke | "2024-11-19T14:51:49Z" | 9 | 0 | [
"license:wtfpl",
"size_categories:n<1K",
"format:csv",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T14:29:14Z" | ---
license: wtfpl
---
|
VargheseP/palgo_ellipse_new_train | VargheseP | "2024-11-19T14:41:58Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T14:37:43Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: caption_basic
dtype: string
- name: caption_artsy
dtype: string
- name: caption_wt_parts
dtype: string
- name: conditioning_image
dtype: image
- name: mask_image
dtype: image
splits:
- name: train
num_bytes: 956894860.975
num_examples: 22885
download_size: 539670372
dataset_size: 956894860.975
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
Lambdaquadro/train_tickets | Lambdaquadro | "2024-11-19T16:14:10Z" | 9 | 0 | [
"license:apache-2.0",
"size_categories:n<1K",
"format:csv",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T16:12:27Z" | ---
license: apache-2.0
---
|
Lambdaquadro/abc | Lambdaquadro | "2024-11-19T16:19:40Z" | 9 | 0 | [
"license:apache-2.0",
"size_categories:n<1K",
"format:csv",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T16:18:33Z" | ---
license: apache-2.0
---
|
1231czx/rebuttal_eaf_rm_bon64_02 | 1231czx | "2024-11-19T16:55:32Z" | 9 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T16:55:31Z" | ---
dataset_info:
features:
- name: instruction
dtype: string
- name: output
dtype: string
- name: generator
dtype: string
splits:
- name: train
num_bytes: 1274744
num_examples: 805
download_size: 767450
dataset_size: 1274744
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
1231czx/rebuttal_af_rrm_bon64_05 | 1231czx | "2024-11-19T16:55:37Z" | 9 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T16:55:36Z" | ---
dataset_info:
features:
- name: instruction
dtype: string
- name: output
dtype: string
- name: generator
dtype: string
splits:
- name: train
num_bytes: 1486565
num_examples: 805
download_size: 887873
dataset_size: 1486565
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
1231czx/rebuttal_eaf_rm_bon64_05 | 1231czx | "2024-11-19T16:55:38Z" | 9 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T16:55:38Z" | ---
dataset_info:
features:
- name: instruction
dtype: string
- name: output
dtype: string
- name: generator
dtype: string
splits:
- name: train
num_bytes: 1278850
num_examples: 805
download_size: 768242
dataset_size: 1278850
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
1231czx/rebuttal_af_rrm_bon8_01 | 1231czx | "2024-11-19T16:55:49Z" | 9 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T16:55:48Z" | ---
dataset_info:
features:
- name: instruction
dtype: string
- name: output
dtype: string
- name: generator
dtype: string
splits:
- name: train
num_bytes: 1487902
num_examples: 805
download_size: 885159
dataset_size: 1487902
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
1231czx/rebuttal_eaf_rrm_bon8_02 | 1231czx | "2024-11-19T16:55:52Z" | 9 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T16:55:51Z" | ---
dataset_info:
features:
- name: instruction
dtype: string
- name: output
dtype: string
- name: generator
dtype: string
splits:
- name: train
num_bytes: 1484897
num_examples: 805
download_size: 884263
dataset_size: 1484897
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
1231czx/rebuttal_eaf_rm_bon8_05 | 1231czx | "2024-11-19T16:56:04Z" | 9 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T16:56:03Z" | ---
dataset_info:
features:
- name: instruction
dtype: string
- name: output
dtype: string
- name: generator
dtype: string
splits:
- name: train
num_bytes: 1339610
num_examples: 805
download_size: 807798
dataset_size: 1339610
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
eminorhan/h2 | eminorhan | "2024-11-19T17:37:31Z" | 9 | 0 | [
"license:cc0-1.0",
"size_categories:1K<n<10K",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T17:32:28Z" | ---
license: cc0-1.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
dataset_info:
features:
- name: sentence
dtype: string
- name: tx_feat
sequence:
sequence: uint8
- name: block_id
dtype: int64
- name: day_id
dtype: uint8
splits:
- name: train
num_bytes: 546738742
num_examples: 1315
download_size: 126794346
dataset_size: 546738742
---
|
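The `tx_feat` feature above is a nested `sequence` of `uint8`, i.e. each example stores a list of byte vectors. A minimal sketch for inspecting one example, assuming the repository is reachable; treating the nested lists as a 2-D array is an assumption based only on the schema:

```python
import numpy as np
from datasets import load_dataset

ds = load_dataset("eminorhan/h2", split="train")

example = ds[0]
# tx_feat is a list of lists of uint8; if the inner lists share a length,
# this stacks them into a 2-D array (rows x features).
feat = np.asarray(example["tx_feat"], dtype=np.uint8)

print(example["sentence"])
print(feat.shape, example["block_id"], example["day_id"])
```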
decimalist/cardano-blockchain | decimalist | "2024-11-19T18:17:41Z" | 9 | 0 | [
"license:mit",
"region:us"
] | null | "2024-11-19T18:17:41Z" | ---
license: mit
---
|
henryeram/finetuning_demo | henryeram | "2024-11-19T18:39:28Z" | 9 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T18:39:26Z" | ---
dataset_info:
features:
- name: prompt
dtype: string
splits:
- name: train
num_bytes: 545
num_examples: 2
download_size: 2631
dataset_size: 545
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
En1gma02/hindi_speech_10h | En1gma02 | "2024-11-19T18:55:19Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T18:41:24Z" | ---
dataset_info:
features:
- name: audio
dtype: audio
- name: text
dtype: string
- name: gender
dtype:
class_label:
names:
'0': female
'1': male
splits:
- name: train
num_bytes: 9067255387.0
num_examples: 11825
download_size: 8210450710
dataset_size: 9067255387.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
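The `gender` column above is a `class_label`, so values are stored as integers and mapped back to names through the feature object, while the `audio` column is decoded lazily on access. A minimal sketch, assuming the repository is still public and an audio backend (e.g. `soundfile`) is installed; note the full download is roughly 8 GB per the card:

```python
from datasets import load_dataset

ds = load_dataset("En1gma02/hindi_speech_10h", split="train")

example = ds[0]
# ClassLabel stores ints; int2str maps 0 -> "female", 1 -> "male" per the card.
gender = ds.features["gender"].int2str(example["gender"])

audio = example["audio"]  # decoded to {"array", "sampling_rate", "path"} on access
print(example["text"], gender)
print(audio["sampling_rate"], len(audio["array"]))
```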
weijiezz/test_prm_data | weijiezz | "2024-11-19T18:42:42Z" | 9 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T18:42:39Z" | ---
dataset_info:
features:
- name: input
dtype: string
- name: label
dtype: string
- name: value
sequence: string
splits:
- name: train
num_bytes: 158849
num_examples: 24
download_size: 76968
dataset_size: 158849
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
Yuanxin-Liu/Check | Yuanxin-Liu | "2024-11-19T18:54:30Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T18:52:43Z" | ---
dataset_info:
features:
- name: question
dtype: string
- name: type
dtype: string
- name: solution
dtype: string
- name: original_question
dtype: string
- name: idx
dtype: int64
- name: prompt
dtype: string
- name: answer
dtype: string
splits:
- name: train
num_bytes: 55129651
num_examples: 20831
download_size: 15944553
dataset_size: 55129651
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
Cferg76/Corpus | Cferg76 | "2024-11-19T19:37:56Z" | 9 | 0 | [
"license:unknown",
"region:us"
] | null | "2024-11-19T19:36:32Z" | ---
license: unknown
---
```python
from datasets import load_dataset

ds = load_dataset("PleIAs/common_corpus")
```
 |
unclemusclez/17LandsDSK-Replays | unclemusclez | "2024-11-19T19:45:44Z" | 9 | 0 | [
"license:cc-by-4.0",
"region:us"
] | null | "2024-11-19T19:40:31Z" | ---
license: cc-by-4.0
---
|
TianqiLiuAI/rm_bo8_gem2b_gem2b_alpacaeval2 | TianqiLiuAI | "2024-11-19T19:44:47Z" | 9 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T19:44:46Z" | ---
dataset_info:
features:
- name: instruction
dtype: string
- name: output
dtype: string
- name: generator
dtype: string
splits:
- name: train
num_bytes: 153018
num_examples: 805
download_size: 90227
dataset_size: 153018
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
TianqiLiuAI/rm_bo64_gem2b_gem2b_alpacaeval2 | TianqiLiuAI | "2024-11-19T19:44:48Z" | 9 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T19:44:47Z" | ---
dataset_info:
features:
- name: instruction
dtype: string
- name: output
dtype: string
- name: generator
dtype: string
splits:
- name: train
num_bytes: 153825
num_examples: 805
download_size: 90217
dataset_size: 153825
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
TianqiLiuAI/rm_bo64_gem2b_gemma_2_2b_alpacaeval2 | TianqiLiuAI | "2024-11-19T19:45:51Z" | 9 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T19:45:50Z" | ---
dataset_info:
features:
- name: instruction
dtype: string
- name: output
dtype: string
- name: generator
dtype: string
splits:
- name: train
num_bytes: 153825
num_examples: 805
download_size: 90217
dataset_size: 153825
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
TianqiLiuAI/rrm_bo8_gem2b_gemma_2_2b_alpacaeval2 | TianqiLiuAI | "2024-11-19T19:45:52Z" | 9 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T19:45:51Z" | ---
dataset_info:
features:
- name: instruction
dtype: string
- name: output
dtype: string
- name: generator
dtype: string
splits:
- name: train
num_bytes: 153828
num_examples: 805
download_size: 90232
dataset_size: 153828
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
TianqiLiuAI/rrm_bo64_gem2b_gemma_2_2b_alpacaeval2 | TianqiLiuAI | "2024-11-19T19:45:53Z" | 9 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T19:45:53Z" | ---
dataset_info:
features:
- name: instruction
dtype: string
- name: output
dtype: string
- name: generator
dtype: string
splits:
- name: train
num_bytes: 154633
num_examples: 805
download_size: 90232
dataset_size: 154633
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
mhivanov123/metaqa-3hop | mhivanov123 | "2024-11-19T20:47:06Z" | 9 | 0 | [
"size_categories:100K<n<1M",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T20:32:46Z" | ---
dataset_info:
features:
- name: id
dtype: string
- name: question
dtype: string
- name: entities
sequence: int64
- name: answers
list:
- name: text
dtype: string
- name: kb_id
dtype: int64
- name: subgraph
struct:
- name: tuples
sequence:
sequence: int64
- name: entities
sequence: int64
- name: answer
sequence: string
- name: a_entity
sequence: string
- name: q_entity
sequence: string
- name: graph
sequence:
sequence: string
- name: choices
sequence: 'null'
splits:
- name: train
num_bytes: 10545005875
num_examples: 114196
- name: validation
num_bytes: 1316234666
num_examples: 14274
- name: test
num_bytes: 1318592825
num_examples: 14274
download_size: 2013669793
dataset_size: 13179833366
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
- split: test
path: data/test-*
---
|
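The `subgraph` feature above is a struct whose `tuples` field is a sequence of int64 sequences (knowledge-base triples), and `answers` is a list of `{text, kb_id}` records. A minimal sketch for inspecting one example, assuming the repository is accessible (the full download is about 2 GB per the card):

```python
from datasets import load_dataset

ds = load_dataset("mhivanov123/metaqa-3hop", split="validation")

example = ds[0]
print(example["question"])
print(len(example["subgraph"]["tuples"]), "triples in the retrieved subgraph")
print([answer["text"] for answer in example["answers"]][:5])
```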
d0m/manu_ai_demov2_florence | d0m | "2024-11-19T20:45:11Z" | 9 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T20:44:29Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 2030867.0
num_examples: 6
download_size: 2032904
dataset_size: 2030867.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
hardesttype/nlp_task | hardesttype | "2024-11-19T21:09:17Z" | 9 | 0 | [
"license:apache-2.0",
"region:us"
] | null | "2024-11-19T21:09:17Z" | ---
license: apache-2.0
---
|
1231czx/rebuttal_rm_testbon_n8 | 1231czx | "2024-11-19T21:21:56Z" | 9 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T21:21:55Z" | ---
dataset_info:
features:
- name: instruction
dtype: string
- name: output
dtype: string
- name: generator
dtype: string
splits:
- name: train
num_bytes: 1598530
num_examples: 805
download_size: 955637
dataset_size: 1598530
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
open-llm-leaderboard/cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc-details | open-llm-leaderboard | "2024-11-19T21:31:30Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T21:27:59Z" | ---
pretty_name: Evaluation run of cluebbers/Llama-3.1-8B-paraphrase-type-generation-etpc
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [cluebbers/Llama-3.1-8B-paraphrase-type-generation-etpc](https://huggingface.co/cluebbers/Llama-3.1-8B-paraphrase-type-generation-etpc)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc-details\"\
,\n\tname=\"cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-11-19T21-27-59.247727](https://huggingface.co/datasets/open-llm-leaderboard/cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc-details/blob/main/cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc/results_2024-11-19T21-27-59.247727.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"acc_norm,none\": 0.36619535607731224,\n \"acc_norm_stderr,none\"\
: 0.0052734901127333995,\n \"prompt_level_loose_acc,none\": 0.08317929759704251,\n\
\ \"prompt_level_loose_acc_stderr,none\": 0.01188373476400857,\n \
\ \"inst_level_strict_acc,none\": 0.17146282973621102,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"acc,none\": 0.25556848404255317,\n \"acc_stderr,none\"\
: 0.003976629780041046,\n \"prompt_level_strict_acc,none\": 0.07024029574861368,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.010997187906692914,\n \
\ \"exact_match,none\": 0.004531722054380665,\n \"exact_match_stderr,none\"\
: 0.0018490874508892755,\n \"inst_level_loose_acc,none\": 0.19184652278177458,\n\
\ \"inst_level_loose_acc_stderr,none\": \"N/A\",\n \"alias\"\
: \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \"acc_norm,none\"\
: 0.37858010762020483,\n \"acc_norm_stderr,none\": 0.006105180850940205,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \
\ \"acc_norm,none\": 0.764,\n \"acc_norm_stderr,none\": 0.026909337594953852\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\"\
: \" - leaderboard_bbh_causal_judgement\",\n \"acc_norm,none\": 0.5294117647058824,\n\
\ \"acc_norm_stderr,none\": 0.03659829510813266\n },\n \
\ \"leaderboard_bbh_date_understanding\": {\n \"alias\": \" - leaderboard_bbh_date_understanding\"\
,\n \"acc_norm,none\": 0.32,\n \"acc_norm_stderr,none\": 0.029561724955240978\n\
\ },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\": 0.456,\n\
\ \"acc_norm_stderr,none\": 0.031563285061213475\n },\n \
\ \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.488,\n \"acc_norm_stderr,none\":\
\ 0.03167708558254714\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\",\n \
\ \"acc_norm,none\": 0.332,\n \"acc_norm_stderr,none\": 0.029844039047465857\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \"\
\ - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\": 0.552,\n \
\ \"acc_norm_stderr,none\": 0.03151438761115348\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
,\n \"acc_norm,none\": 0.284,\n \"acc_norm_stderr,none\":\
\ 0.02857695873043744\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.284,\n \"acc_norm_stderr,none\":\
\ 0.02857695873043744\n },\n \"leaderboard_bbh_logical_deduction_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\
,\n \"acc_norm,none\": 0.416,\n \"acc_norm_stderr,none\":\
\ 0.031235856237014505\n },\n \"leaderboard_bbh_movie_recommendation\"\
: {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\",\n \
\ \"acc_norm,none\": 0.496,\n \"acc_norm_stderr,none\": 0.0316851985511992\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \"\
\ - leaderboard_bbh_navigate\",\n \"acc_norm,none\": 0.42,\n \
\ \"acc_norm_stderr,none\": 0.03127799950463661\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"alias\": \" - leaderboard_bbh_object_counting\",\n \
\ \"acc_norm,none\": 0.368,\n \"acc_norm_stderr,none\": 0.03056207062099311\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
alias\": \" - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\"\
: 0.2054794520547945,\n \"acc_norm_stderr,none\": 0.03355465401072847\n\
\ },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\",\n\
\ \"acc_norm,none\": 0.26,\n \"acc_norm_stderr,none\": 0.027797315752644335\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\": 0.248,\n \
\ \"acc_norm_stderr,none\": 0.027367497504863593\n },\n \"\
leaderboard_bbh_salient_translation_error_detection\": {\n \"alias\"\
: \" - leaderboard_bbh_salient_translation_error_detection\",\n \"acc_norm,none\"\
: 0.26,\n \"acc_norm_stderr,none\": 0.027797315752644335\n },\n\
\ \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.4550561797752809,\n \"acc_norm_stderr,none\"\
: 0.03743016495716991\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \
\ \"acc_norm,none\": 0.58,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_temporal_sequences\": {\n \"alias\"\
: \" - leaderboard_bbh_temporal_sequences\",\n \"acc_norm,none\": 0.256,\n\
\ \"acc_norm_stderr,none\": 0.027657108718204846\n },\n \
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \"\
alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\",\n \
\ \"acc_norm,none\": 0.192,\n \"acc_norm_stderr,none\": 0.024960691989171963\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.128,\n \"acc_norm_stderr,none\":\
\ 0.021172081336336534\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.292,\n \"acc_norm_stderr,none\":\
\ 0.02881432040220563\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2651006711409396,\n\
\ \"acc_norm_stderr,none\": 0.012799282786329112,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.2777777777777778,\n \"acc_norm_stderr,none\": 0.03191178226713548\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.2600732600732601,\n\
\ \"acc_norm_stderr,none\": 0.018790743352015988\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.265625,\n \"acc_norm_stderr,none\"\
: 0.02089005840079951\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.07024029574861368,\n \"prompt_level_strict_acc_stderr,none\": 0.010997187906692914,\n\
\ \"inst_level_strict_acc,none\": 0.17146282973621102,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.08317929759704251,\n \"prompt_level_loose_acc_stderr,none\": 0.01188373476400857,\n\
\ \"inst_level_loose_acc,none\": 0.19184652278177458,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.004531722054380665,\n \"exact_match_stderr,none\"\
: 0.0018490874508892755,\n \"alias\": \" - leaderboard_math_hard\"\n\
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.003257328990228013,\n\
\ \"exact_match_stderr,none\": 0.003257328990228013\n },\n \
\ \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\": \"\
\ - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.008130081300813009,\n \"exact_match_stderr,none\": 0.008130081300813007\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\"\
: \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\": 0.007575757575757576,\n\
\ \"exact_match_stderr,none\": 0.007575757575757577\n },\n \
\ \"leaderboard_math_intermediate_algebra_hard\": {\n \"alias\":\
\ \" - leaderboard_math_intermediate_algebra_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_num_theory_hard\"\
: {\n \"alias\": \" - leaderboard_math_num_theory_hard\",\n \
\ \"exact_match,none\": 0.006493506493506494,\n \"exact_match_stderr,none\"\
: 0.006493506493506494\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_prealgebra_hard\",\n \
\ \"exact_match,none\": 0.0051813471502590676,\n \"exact_match_stderr,none\"\
: 0.0051813471502590676\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.007407407407407408,\n \"exact_match_stderr,none\"\
: 0.007407407407407408\n },\n \"leaderboard_mmlu_pro\": {\n \
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.25556848404255317,\n\
\ \"acc_stderr,none\": 0.003976629780041046\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.4312169312169312,\n \"acc_norm_stderr,none\"\
: 0.01788862311572941,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"alias\": \"\
\ - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\": 0.512,\n\
\ \"acc_norm_stderr,none\": 0.03167708558254714\n },\n \
\ \"leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.3515625,\n \"acc_norm_stderr,none\"\
: 0.029899590697818237\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \
\ \"acc_norm,none\": 0.432,\n \"acc_norm_stderr,none\": 0.03139181076542942\n\
\ }\n },\n \"leaderboard\": {\n \"acc_norm,none\": 0.36619535607731224,\n\
\ \"acc_norm_stderr,none\": 0.0052734901127333995,\n \"prompt_level_loose_acc,none\"\
: 0.08317929759704251,\n \"prompt_level_loose_acc_stderr,none\": 0.01188373476400857,\n\
\ \"inst_level_strict_acc,none\": 0.17146282973621102,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"acc,none\": 0.25556848404255317,\n \"acc_stderr,none\"\
: 0.003976629780041046,\n \"prompt_level_strict_acc,none\": 0.07024029574861368,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.010997187906692914,\n \
\ \"exact_match,none\": 0.004531722054380665,\n \"exact_match_stderr,none\"\
: 0.0018490874508892755,\n \"inst_level_loose_acc,none\": 0.19184652278177458,\n\
\ \"inst_level_loose_acc_stderr,none\": \"N/A\",\n \"alias\": \"leaderboard\"\
\n },\n \"leaderboard_bbh\": {\n \"acc_norm,none\": 0.37858010762020483,\n\
\ \"acc_norm_stderr,none\": 0.006105180850940205,\n \"alias\": \"\
\ - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\": {\n\
\ \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\"\
: 0.764,\n \"acc_norm_stderr,none\": 0.026909337594953852\n },\n \"\
leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.5294117647058824,\n \"acc_norm_stderr,none\"\
: 0.03659829510813266\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.32,\n \"acc_norm_stderr,none\": 0.029561724955240978\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.456,\n \"acc_norm_stderr,none\": 0.031563285061213475\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.332,\n \"acc_norm_stderr,none\": 0.029844039047465857\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.552,\n \"acc_norm_stderr,none\": 0.03151438761115348\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.284,\n \"acc_norm_stderr,none\": 0.02857695873043744\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.284,\n \"acc_norm_stderr,none\": 0.02857695873043744\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.416,\n \"acc_norm_stderr,none\": 0.031235856237014505\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.496,\n \"acc_norm_stderr,none\": 0.0316851985511992\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.42,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.368,\n \"acc_norm_stderr,none\": 0.03056207062099311\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.2054794520547945,\n\
\ \"acc_norm_stderr,none\": 0.03355465401072847\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.26,\n \"acc_norm_stderr,none\": 0.027797315752644335\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.248,\n \"acc_norm_stderr,none\": 0.027367497504863593\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.26,\n \"acc_norm_stderr,none\": 0.027797315752644335\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.4550561797752809,\n \"acc_norm_stderr,none\"\
: 0.03743016495716991\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.58,\n \"acc_norm_stderr,none\": 0.03127799950463661\n },\n \"leaderboard_bbh_temporal_sequences\"\
: {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\",\n \"\
acc_norm,none\": 0.256,\n \"acc_norm_stderr,none\": 0.027657108718204846\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.192,\n \"acc_norm_stderr,none\": 0.024960691989171963\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.128,\n \"acc_norm_stderr,none\": 0.021172081336336534\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.292,\n \"acc_norm_stderr,none\": 0.02881432040220563\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2651006711409396,\n\
\ \"acc_norm_stderr,none\": 0.012799282786329112,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.2777777777777778,\n\
\ \"acc_norm_stderr,none\": 0.03191178226713548\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.2600732600732601,\n \"acc_norm_stderr,none\": 0.018790743352015988\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.265625,\n \"acc_norm_stderr,none\": 0.02089005840079951\n\
\ },\n \"leaderboard_ifeval\": {\n \"alias\": \" - leaderboard_ifeval\"\
,\n \"prompt_level_strict_acc,none\": 0.07024029574861368,\n \"prompt_level_strict_acc_stderr,none\"\
: 0.010997187906692914,\n \"inst_level_strict_acc,none\": 0.17146282973621102,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.08317929759704251,\n \"prompt_level_loose_acc_stderr,none\": 0.01188373476400857,\n\
\ \"inst_level_loose_acc,none\": 0.19184652278177458,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\"\n },\n \"leaderboard_math_hard\": {\n \"exact_match,none\"\
: 0.004531722054380665,\n \"exact_match_stderr,none\": 0.0018490874508892755,\n\
\ \"alias\": \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.003257328990228013,\n \"exact_match_stderr,none\": 0.003257328990228013\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.008130081300813009,\n \"exact_match_stderr,none\": 0.008130081300813007\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\": \" - leaderboard_math_geometry_hard\"\
,\n \"exact_match,none\": 0.007575757575757576,\n \"exact_match_stderr,none\"\
: 0.007575757575757577\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n \
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\": \" - leaderboard_math_num_theory_hard\"\
,\n \"exact_match,none\": 0.006493506493506494,\n \"exact_match_stderr,none\"\
: 0.006493506493506494\n },\n \"leaderboard_math_prealgebra_hard\": {\n \
\ \"alias\": \" - leaderboard_math_prealgebra_hard\",\n \"exact_match,none\"\
: 0.0051813471502590676,\n \"exact_match_stderr,none\": 0.0051813471502590676\n\
\ },\n \"leaderboard_math_precalculus_hard\": {\n \"alias\": \" -\
\ leaderboard_math_precalculus_hard\",\n \"exact_match,none\": 0.007407407407407408,\n\
\ \"exact_match_stderr,none\": 0.007407407407407408\n },\n \"leaderboard_mmlu_pro\"\
: {\n \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.25556848404255317,\n\
\ \"acc_stderr,none\": 0.003976629780041046\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.4312169312169312,\n \"acc_norm_stderr,none\"\
: 0.01788862311572941,\n \"alias\": \" - leaderboard_musr\"\n },\n \
\ \"leaderboard_musr_murder_mysteries\": {\n \"alias\": \" - leaderboard_musr_murder_mysteries\"\
,\n \"acc_norm,none\": 0.512,\n \"acc_norm_stderr,none\": 0.03167708558254714\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\": \" -\
\ leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.3515625,\n\
\ \"acc_norm_stderr,none\": 0.029899590697818237\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \"acc_norm,none\"\
: 0.432,\n \"acc_norm_stderr,none\": 0.03139181076542942\n }\n}\n```"
repo_url: https://huggingface.co/cluebbers/Llama-3.1-8B-paraphrase-type-generation-etpc
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_bbh_navigate
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_bbh_snarks
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_gpqa_extended
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_gpqa_main
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_gpqa_main_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_ifeval
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_ifeval_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_mmlu_pro
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_musr_object_placements
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-19T21-27-59.247727.jsonl'
- config_name: cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_19T21_27_59.247727
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-19T21-27-59.247727.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-19T21-27-59.247727.jsonl'
---
# Dataset Card for Evaluation run of cluebbers/Llama-3.1-8B-paraphrase-type-generation-etpc
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [cluebbers/Llama-3.1-8B-paraphrase-type-generation-etpc](https://huggingface.co/cluebbers/Llama-3.1-8B-paraphrase-type-generation-etpc)
The dataset is composed of 38 configuration(s), each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc-details",
name="cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
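Because the card lists 38 per-task configurations with long generated names, it can be easier to enumerate them programmatically than to copy them by hand. A minimal sketch using `get_dataset_config_names` from the `datasets` library, assuming network access to the Hub:

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc-details"

# Enumerate every per-task configuration instead of hard-coding the long names.
configs = get_dataset_config_names(repo)
print(len(configs), configs[:3])

# Load the latest run of the first configuration found.
details = load_dataset(repo, name=configs[0], split="latest")
print(details)
```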
## Latest results
These are the [latest results from run 2024-11-19T21-27-59.247727](https://huggingface.co/datasets/open-llm-leaderboard/cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc-details/blob/main/cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc/results_2024-11-19T21-27-59.247727.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each task's results in the "results" configuration and in the "latest" split of each per-task configuration):
```python
{
"all": {
"leaderboard": {
"acc_norm,none": 0.36619535607731224,
"acc_norm_stderr,none": 0.0052734901127333995,
"prompt_level_loose_acc,none": 0.08317929759704251,
"prompt_level_loose_acc_stderr,none": 0.01188373476400857,
"inst_level_strict_acc,none": 0.17146282973621102,
"inst_level_strict_acc_stderr,none": "N/A",
"acc,none": 0.25556848404255317,
"acc_stderr,none": 0.003976629780041046,
"prompt_level_strict_acc,none": 0.07024029574861368,
"prompt_level_strict_acc_stderr,none": 0.010997187906692914,
"exact_match,none": 0.004531722054380665,
"exact_match_stderr,none": 0.0018490874508892755,
"inst_level_loose_acc,none": 0.19184652278177458,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.37858010762020483,
"acc_norm_stderr,none": 0.006105180850940205,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.764,
"acc_norm_stderr,none": 0.026909337594953852
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5294117647058824,
"acc_norm_stderr,none": 0.03659829510813266
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.32,
"acc_norm_stderr,none": 0.029561724955240978
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.456,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.332,
"acc_norm_stderr,none": 0.029844039047465857
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.03151438761115348
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.284,
"acc_norm_stderr,none": 0.02857695873043744
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.284,
"acc_norm_stderr,none": 0.02857695873043744
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.416,
"acc_norm_stderr,none": 0.031235856237014505
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.496,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.368,
"acc_norm_stderr,none": 0.03056207062099311
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.2054794520547945,
"acc_norm_stderr,none": 0.03355465401072847
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.26,
"acc_norm_stderr,none": 0.027797315752644335
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.248,
"acc_norm_stderr,none": 0.027367497504863593
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.26,
"acc_norm_stderr,none": 0.027797315752644335
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.4550561797752809,
"acc_norm_stderr,none": 0.03743016495716991
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.58,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.256,
"acc_norm_stderr,none": 0.027657108718204846
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.192,
"acc_norm_stderr,none": 0.024960691989171963
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.128,
"acc_norm_stderr,none": 0.021172081336336534
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.292,
"acc_norm_stderr,none": 0.02881432040220563
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2651006711409396,
"acc_norm_stderr,none": 0.012799282786329112,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2777777777777778,
"acc_norm_stderr,none": 0.03191178226713548
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.2600732600732601,
"acc_norm_stderr,none": 0.018790743352015988
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.265625,
"acc_norm_stderr,none": 0.02089005840079951
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.07024029574861368,
"prompt_level_strict_acc_stderr,none": 0.010997187906692914,
"inst_level_strict_acc,none": 0.17146282973621102,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.08317929759704251,
"prompt_level_loose_acc_stderr,none": 0.01188373476400857,
"inst_level_loose_acc,none": 0.19184652278177458,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.004531722054380665,
"exact_match_stderr,none": 0.0018490874508892755,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.003257328990228013,
"exact_match_stderr,none": 0.003257328990228013
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.008130081300813009,
"exact_match_stderr,none": 0.008130081300813007
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.007575757575757576,
"exact_match_stderr,none": 0.007575757575757577
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.006493506493506494,
"exact_match_stderr,none": 0.006493506493506494
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0051813471502590676,
"exact_match_stderr,none": 0.0051813471502590676
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.007407407407407408,
"exact_match_stderr,none": 0.007407407407407408
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.25556848404255317,
"acc_stderr,none": 0.003976629780041046
},
"leaderboard_musr": {
"acc_norm,none": 0.4312169312169312,
"acc_norm_stderr,none": 0.01788862311572941,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.512,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.3515625,
"acc_norm_stderr,none": 0.029899590697818237
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.432,
"acc_norm_stderr,none": 0.03139181076542942
}
},
"leaderboard": {
"acc_norm,none": 0.36619535607731224,
"acc_norm_stderr,none": 0.0052734901127333995,
"prompt_level_loose_acc,none": 0.08317929759704251,
"prompt_level_loose_acc_stderr,none": 0.01188373476400857,
"inst_level_strict_acc,none": 0.17146282973621102,
"inst_level_strict_acc_stderr,none": "N/A",
"acc,none": 0.25556848404255317,
"acc_stderr,none": 0.003976629780041046,
"prompt_level_strict_acc,none": 0.07024029574861368,
"prompt_level_strict_acc_stderr,none": 0.010997187906692914,
"exact_match,none": 0.004531722054380665,
"exact_match_stderr,none": 0.0018490874508892755,
"inst_level_loose_acc,none": 0.19184652278177458,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.37858010762020483,
"acc_norm_stderr,none": 0.006105180850940205,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.764,
"acc_norm_stderr,none": 0.026909337594953852
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5294117647058824,
"acc_norm_stderr,none": 0.03659829510813266
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.32,
"acc_norm_stderr,none": 0.029561724955240978
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.456,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.332,
"acc_norm_stderr,none": 0.029844039047465857
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.03151438761115348
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.284,
"acc_norm_stderr,none": 0.02857695873043744
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.284,
"acc_norm_stderr,none": 0.02857695873043744
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.416,
"acc_norm_stderr,none": 0.031235856237014505
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.496,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.368,
"acc_norm_stderr,none": 0.03056207062099311
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.2054794520547945,
"acc_norm_stderr,none": 0.03355465401072847
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.26,
"acc_norm_stderr,none": 0.027797315752644335
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.248,
"acc_norm_stderr,none": 0.027367497504863593
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.26,
"acc_norm_stderr,none": 0.027797315752644335
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.4550561797752809,
"acc_norm_stderr,none": 0.03743016495716991
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.58,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.256,
"acc_norm_stderr,none": 0.027657108718204846
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.192,
"acc_norm_stderr,none": 0.024960691989171963
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.128,
"acc_norm_stderr,none": 0.021172081336336534
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.292,
"acc_norm_stderr,none": 0.02881432040220563
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2651006711409396,
"acc_norm_stderr,none": 0.012799282786329112,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2777777777777778,
"acc_norm_stderr,none": 0.03191178226713548
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.2600732600732601,
"acc_norm_stderr,none": 0.018790743352015988
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.265625,
"acc_norm_stderr,none": 0.02089005840079951
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.07024029574861368,
"prompt_level_strict_acc_stderr,none": 0.010997187906692914,
"inst_level_strict_acc,none": 0.17146282973621102,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.08317929759704251,
"prompt_level_loose_acc_stderr,none": 0.01188373476400857,
"inst_level_loose_acc,none": 0.19184652278177458,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.004531722054380665,
"exact_match_stderr,none": 0.0018490874508892755,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.003257328990228013,
"exact_match_stderr,none": 0.003257328990228013
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.008130081300813009,
"exact_match_stderr,none": 0.008130081300813007
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.007575757575757576,
"exact_match_stderr,none": 0.007575757575757577
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.006493506493506494,
"exact_match_stderr,none": 0.006493506493506494
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0051813471502590676,
"exact_match_stderr,none": 0.0051813471502590676
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.007407407407407408,
"exact_match_stderr,none": 0.007407407407407408
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.25556848404255317,
"acc_stderr,none": 0.003976629780041046
},
"leaderboard_musr": {
"acc_norm,none": 0.4312169312169312,
"acc_norm_stderr,none": 0.01788862311572941,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.512,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.3515625,
"acc_norm_stderr,none": 0.029899590697818237
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.432,
"acc_norm_stderr,none": 0.03139181076542942
}
}
```
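The aggregated results shown above are also stored as a plain JSON file in this repository (the file linked at the top of this section). As a minimal sketch (assuming `huggingface_hub` is installed; the exact top-level keys of the file may differ from the excerpt above), you could download and parse it like this:
```python
import json

from huggingface_hub import hf_hub_download

# Minimal sketch: fetch and parse the aggregated-results file linked above.
path = hf_hub_download(
    repo_id="open-llm-leaderboard/cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc-details",
    filename="cluebbers__Llama-3.1-8B-paraphrase-type-generation-etpc/results_2024-11-19T21-27-59.247727.json",
    repo_type="dataset",
)
with open(path) as f:
    results = json.load(f)
print(list(results.keys()))  # top-level sections of the aggregated results
```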
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
moon946129/cocorob | moon946129 | "2024-11-19T21:32:33Z" | 9 | 0 | [
"license:openrail",
"size_categories:n<1K",
"format:audiofolder",
"modality:audio",
"library:datasets",
"library:mlcroissant",
"region:us"
] | null | "2024-11-19T21:31:14Z" | ---
license: openrail
---
|
open-llm-leaderboard/postbot__gpt2-medium-emailgen-details | open-llm-leaderboard | "2024-11-19T23:16:29Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T23:13:08Z" | ---
pretty_name: Evaluation run of postbot/gpt2-medium-emailgen
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [postbot/gpt2-medium-emailgen](https://huggingface.co/postbot/gpt2-medium-emailgen)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/postbot__gpt2-medium-emailgen-details\"\
,\n\tname=\"postbot__gpt2-medium-emailgen__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-11-19T23-13-07.268857](https://huggingface.co/datasets/open-llm-leaderboard/postbot__gpt2-medium-emailgen-details/blob/main/postbot__gpt2-medium-emailgen/results_2024-11-19T23-13-07.268857.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"inst_level_strict_acc,none\": 0.19304556354916066,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"acc_norm,none\": 0.3108055519522636,\n\
\ \"acc_norm_stderr,none\": 0.005029199223841951,\n \"prompt_level_strict_acc,none\"\
: 0.10536044362292052,\n \"prompt_level_strict_acc_stderr,none\": 0.01321192086172503,\n\
\ \"acc,none\": 0.1146941489361702,\n \"acc_stderr,none\"\
: 0.0029051357814625332,\n \"exact_match,none\": 0.0,\n \"\
exact_match_stderr,none\": 0.0,\n \"prompt_level_loose_acc,none\": 0.10536044362292052,\n\
\ \"prompt_level_loose_acc_stderr,none\": 0.01321192086172503,\n \
\ \"inst_level_loose_acc,none\": 0.19664268585131894,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.31088352716542267,\n \"acc_norm_stderr,none\"\
: 0.005760202058622169,\n \"alias\": \" - leaderboard_bbh\"\n \
\ },\n \"leaderboard_bbh_boolean_expressions\": {\n \"alias\"\
: \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\": 0.544,\n\
\ \"acc_norm_stderr,none\": 0.031563285061213475\n },\n \
\ \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.47593582887700536,\n \"acc_norm_stderr,none\"\
: 0.03661929361528698\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_date_understanding\",\n \
\ \"acc_norm,none\": 0.204,\n \"acc_norm_stderr,none\": 0.025537121574548162\n\
\ },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\": 0.344,\n\
\ \"acc_norm_stderr,none\": 0.03010450339231644\n },\n \
\ \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.532,\n \"acc_norm_stderr,none\":\
\ 0.031621252575725574\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\",\n \
\ \"acc_norm,none\": 0.132,\n \"acc_norm_stderr,none\": 0.021450980824038166\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \"\
\ - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\": 0.516,\n \
\ \"acc_norm_stderr,none\": 0.03166998503010743\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
,\n \"acc_norm,none\": 0.22,\n \"acc_norm_stderr,none\": 0.026251792824605793\n\
\ },\n \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\",\n\
\ \"acc_norm,none\": 0.136,\n \"acc_norm_stderr,none\": 0.021723342617052086\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n\
\ \"acc_norm,none\": 0.308,\n \"acc_norm_stderr,none\": 0.02925692860650181\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"acc_norm,none\"\
: 0.264,\n \"acc_norm_stderr,none\": 0.027934518957690866\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.536,\n \"acc_norm_stderr,none\":\
\ 0.031603975145223735\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"alias\": \" - leaderboard_bbh_object_counting\",\n \
\ \"acc_norm,none\": 0.068,\n \"acc_norm_stderr,none\": 0.015953748410747037\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
alias\": \" - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\"\
: 0.21232876712328766,\n \"acc_norm_stderr,none\": 0.03396197282917473\n\
\ },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\",\n\
\ \"acc_norm,none\": 0.16,\n \"acc_norm_stderr,none\": 0.023232714782060626\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\": 0.26,\n \
\ \"acc_norm_stderr,none\": 0.027797315752644335\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\
,\n \"acc_norm,none\": 0.16,\n \"acc_norm_stderr,none\": 0.023232714782060626\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" -\
\ leaderboard_bbh_snarks\",\n \"acc_norm,none\": 0.4887640449438202,\n\
\ \"acc_norm_stderr,none\": 0.03757281091983857\n },\n \
\ \"leaderboard_bbh_sports_understanding\": {\n \"alias\": \" - leaderboard_bbh_sports_understanding\"\
,\n \"acc_norm,none\": 0.46,\n \"acc_norm_stderr,none\": 0.031584653891499004\n\
\ },\n \"leaderboard_bbh_temporal_sequences\": {\n \"alias\"\
: \" - leaderboard_bbh_temporal_sequences\",\n \"acc_norm,none\": 0.284,\n\
\ \"acc_norm_stderr,none\": 0.02857695873043744\n },\n \
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \"\
alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\",\n \
\ \"acc_norm,none\": 0.204,\n \"acc_norm_stderr,none\": 0.025537121574548162\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.144,\n \"acc_norm_stderr,none\":\
\ 0.022249407735450245\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.324,\n \"acc_norm_stderr,none\":\
\ 0.029658294924545567\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.536,\n \"acc_norm_stderr,none\": 0.031603975145223735\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2600671140939597,\n\
\ \"acc_norm_stderr,none\": 0.01271729165731483,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.26262626262626265,\n \"acc_norm_stderr,none\": 0.031353050095330834\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.27106227106227104,\n\
\ \"acc_norm_stderr,none\": 0.01904063815660353\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.24553571428571427,\n \"acc_norm_stderr,none\"\
: 0.020357428454484603\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.10536044362292052,\n \"prompt_level_strict_acc_stderr,none\": 0.013211920861725028,\n\
\ \"inst_level_strict_acc,none\": 0.19304556354916066,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.10536044362292052,\n \"prompt_level_loose_acc_stderr,none\": 0.013211920861725028,\n\
\ \"inst_level_loose_acc,none\": 0.19664268585131894,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0,\n \"alias\": \" - leaderboard_math_hard\"\n },\n \
\ \"leaderboard_math_algebra_hard\": {\n \"alias\": \" - leaderboard_math_algebra_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0\n },\n \"leaderboard_math_counting_and_prob_hard\": {\n \
\ \"alias\": \" - leaderboard_math_counting_and_prob_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n \
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\"\
: \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\": 0.0,\n\
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n\
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_prealgebra_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_precalculus_hard\": {\n \"alias\"\
: \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\":\
\ 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_mmlu_pro\"\
: {\n \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\"\
: 0.1146941489361702,\n \"acc_stderr,none\": 0.0029051357814625332\n\
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.39021164021164023,\n\
\ \"acc_norm_stderr,none\": 0.017353346774513254,\n \"alias\"\
: \" - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\"\
: {\n \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \
\ \"acc_norm,none\": 0.532,\n \"acc_norm_stderr,none\": 0.031621252575725574\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\"\
: \" - leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.27734375,\n\
\ \"acc_norm_stderr,none\": 0.02803528549328419\n },\n \
\ \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.364,\n \"acc_norm_stderr,none\":\
\ 0.030491555220405475\n }\n },\n \"leaderboard\": {\n \"inst_level_strict_acc,none\"\
: 0.19304556354916066,\n \"inst_level_strict_acc_stderr,none\": \"N/A\",\n\
\ \"acc_norm,none\": 0.3108055519522636,\n \"acc_norm_stderr,none\"\
: 0.005029199223841951,\n \"prompt_level_strict_acc,none\": 0.10536044362292052,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.01321192086172503,\n \
\ \"acc,none\": 0.1146941489361702,\n \"acc_stderr,none\": 0.0029051357814625332,\n\
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0,\n\
\ \"prompt_level_loose_acc,none\": 0.10536044362292052,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.01321192086172503,\n \"inst_level_loose_acc,none\": 0.19664268585131894,\n\
\ \"inst_level_loose_acc_stderr,none\": \"N/A\",\n \"alias\": \"leaderboard\"\
\n },\n \"leaderboard_bbh\": {\n \"acc_norm,none\": 0.31088352716542267,\n\
\ \"acc_norm_stderr,none\": 0.005760202058622169,\n \"alias\": \"\
\ - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\": {\n\
\ \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\"\
: 0.544,\n \"acc_norm_stderr,none\": 0.031563285061213475\n },\n \"\
leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.47593582887700536,\n \"acc_norm_stderr,none\"\
: 0.03661929361528698\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.204,\n \"acc_norm_stderr,none\": 0.025537121574548162\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.344,\n \"acc_norm_stderr,none\": 0.03010450339231644\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.532,\n \"acc_norm_stderr,none\": 0.031621252575725574\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.132,\n \"acc_norm_stderr,none\": 0.021450980824038166\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.22,\n \"acc_norm_stderr,none\": 0.026251792824605793\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.136,\n \"acc_norm_stderr,none\": 0.021723342617052086\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.308,\n \"acc_norm_stderr,none\": 0.02925692860650181\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.264,\n \"acc_norm_stderr,none\": 0.027934518957690866\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.536,\n \"acc_norm_stderr,none\": 0.031603975145223735\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.068,\n \"acc_norm_stderr,none\": 0.015953748410747037\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.21232876712328766,\n\
\ \"acc_norm_stderr,none\": 0.03396197282917473\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.16,\n \"acc_norm_stderr,none\": 0.023232714782060626\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.26,\n \"acc_norm_stderr,none\": 0.027797315752644335\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.16,\n \"acc_norm_stderr,none\": 0.023232714782060626\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.4887640449438202,\n \"acc_norm_stderr,none\"\
: 0.03757281091983857\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.46,\n \"acc_norm_stderr,none\": 0.031584653891499004\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.284,\n \"acc_norm_stderr,none\": 0.02857695873043744\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.204,\n \"acc_norm_stderr,none\": 0.025537121574548162\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.144,\n \"acc_norm_stderr,none\": 0.022249407735450245\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.324,\n \"acc_norm_stderr,none\": 0.029658294924545567\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.536,\n \"acc_norm_stderr,none\": 0.031603975145223735\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2600671140939597,\n\
\ \"acc_norm_stderr,none\": 0.01271729165731483,\n \"alias\": \" -\
\ leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"alias\"\
: \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.26262626262626265,\n\
\ \"acc_norm_stderr,none\": 0.031353050095330834\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.27106227106227104,\n \"acc_norm_stderr,none\": 0.01904063815660353\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.24553571428571427,\n \"acc_norm_stderr,none\"\
: 0.020357428454484603\n },\n \"leaderboard_ifeval\": {\n \"alias\"\
: \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.10536044362292052,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.013211920861725028,\n \
\ \"inst_level_strict_acc,none\": 0.19304556354916066,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.10536044362292052,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.013211920861725028,\n \"inst_level_loose_acc,none\"\
: 0.19664268585131894,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n\
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.0,\n\
\ \"exact_match_stderr,none\": 0.0,\n \"alias\": \" - leaderboard_math_hard\"\
\n },\n \"leaderboard_math_algebra_hard\": {\n \"alias\": \" - leaderboard_math_algebra_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_geometry_hard\"\
: {\n \"alias\": \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n \
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\": \" - leaderboard_math_num_theory_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_precalculus_hard\": {\n \"alias\": \" -\
\ leaderboard_math_precalculus_hard\",\n \"exact_match,none\": 0.0,\n \
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_mmlu_pro\": {\n\
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.1146941489361702,\n\
\ \"acc_stderr,none\": 0.0029051357814625332\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.39021164021164023,\n \"acc_norm_stderr,none\"\
: 0.017353346774513254,\n \"alias\": \" - leaderboard_musr\"\n },\n \
\ \"leaderboard_musr_murder_mysteries\": {\n \"alias\": \" - leaderboard_musr_murder_mysteries\"\
,\n \"acc_norm,none\": 0.532,\n \"acc_norm_stderr,none\": 0.031621252575725574\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\": \" -\
\ leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.27734375,\n\
\ \"acc_norm_stderr,none\": 0.02803528549328419\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \"acc_norm,none\"\
: 0.364,\n \"acc_norm_stderr,none\": 0.030491555220405475\n }\n}\n```"
repo_url: https://huggingface.co/postbot/gpt2-medium-emailgen
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: postbot__gpt2-medium-emailgen__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_bbh_navigate
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_bbh_snarks
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_gpqa_extended
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_gpqa_main
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_gpqa_main_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_ifeval
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_ifeval_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_mmlu_pro
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_musr_object_placements
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-19T23-13-07.268857.jsonl'
- config_name: postbot__gpt2-medium-emailgen__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_19T23_13_07.268857
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-19T23-13-07.268857.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-19T23-13-07.268857.jsonl'
---
# Dataset Card for Evaluation run of postbot/gpt2-medium-emailgen
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [postbot/gpt2-medium-emailgen](https://huggingface.co/postbot/gpt2-medium-emailgen)
The dataset is composed of 38 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the most recent results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can, for instance, do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/postbot__gpt2-medium-emailgen-details",
name="postbot__gpt2-medium-emailgen__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
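Besides `latest`, each configuration also exposes a split named after the run timestamp (see the configs listed above). As a minimal sketch (the config name below is one of the 38 listed in this card), you could load that timestamped split directly:
```python
from datasets import load_dataset

# Minimal sketch: load a task via its timestamped split instead of "latest".
# The split name is the run timestamp listed in the configs above.
data_run = load_dataset(
    "open-llm-leaderboard/postbot__gpt2-medium-emailgen-details",
    name="postbot__gpt2-medium-emailgen__leaderboard_ifeval",
    split="2024_11_19T23_13_07.268857",
)
print(data_run)
```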
## Latest results
These are the [latest results from run 2024-11-19T23-13-07.268857](https://huggingface.co/datasets/open-llm-leaderboard/postbot__gpt2-medium-emailgen-details/blob/main/postbot__gpt2-medium-emailgen/results_2024-11-19T23-13-07.268857.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results file and in the "latest" split of each eval):
```python
{
"all": {
"leaderboard": {
"inst_level_strict_acc,none": 0.19304556354916066,
"inst_level_strict_acc_stderr,none": "N/A",
"acc_norm,none": 0.3108055519522636,
"acc_norm_stderr,none": 0.005029199223841951,
"prompt_level_strict_acc,none": 0.10536044362292052,
"prompt_level_strict_acc_stderr,none": 0.01321192086172503,
"acc,none": 0.1146941489361702,
"acc_stderr,none": 0.0029051357814625332,
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"prompt_level_loose_acc,none": 0.10536044362292052,
"prompt_level_loose_acc_stderr,none": 0.01321192086172503,
"inst_level_loose_acc,none": 0.19664268585131894,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.31088352716542267,
"acc_norm_stderr,none": 0.005760202058622169,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.47593582887700536,
"acc_norm_stderr,none": 0.03661929361528698
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.204,
"acc_norm_stderr,none": 0.025537121574548162
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.344,
"acc_norm_stderr,none": 0.03010450339231644
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.532,
"acc_norm_stderr,none": 0.031621252575725574
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.132,
"acc_norm_stderr,none": 0.021450980824038166
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.22,
"acc_norm_stderr,none": 0.026251792824605793
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.136,
"acc_norm_stderr,none": 0.021723342617052086
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.308,
"acc_norm_stderr,none": 0.02925692860650181
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.264,
"acc_norm_stderr,none": 0.027934518957690866
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.536,
"acc_norm_stderr,none": 0.031603975145223735
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.068,
"acc_norm_stderr,none": 0.015953748410747037
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.21232876712328766,
"acc_norm_stderr,none": 0.03396197282917473
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.16,
"acc_norm_stderr,none": 0.023232714782060626
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.26,
"acc_norm_stderr,none": 0.027797315752644335
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.16,
"acc_norm_stderr,none": 0.023232714782060626
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.4887640449438202,
"acc_norm_stderr,none": 0.03757281091983857
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.46,
"acc_norm_stderr,none": 0.031584653891499004
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.284,
"acc_norm_stderr,none": 0.02857695873043744
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.204,
"acc_norm_stderr,none": 0.025537121574548162
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.144,
"acc_norm_stderr,none": 0.022249407735450245
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.324,
"acc_norm_stderr,none": 0.029658294924545567
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.536,
"acc_norm_stderr,none": 0.031603975145223735
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2600671140939597,
"acc_norm_stderr,none": 0.01271729165731483,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.26262626262626265,
"acc_norm_stderr,none": 0.031353050095330834
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.27106227106227104,
"acc_norm_stderr,none": 0.01904063815660353
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.24553571428571427,
"acc_norm_stderr,none": 0.020357428454484603
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.10536044362292052,
"prompt_level_strict_acc_stderr,none": 0.013211920861725028,
"inst_level_strict_acc,none": 0.19304556354916066,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.10536044362292052,
"prompt_level_loose_acc_stderr,none": 0.013211920861725028,
"inst_level_loose_acc,none": 0.19664268585131894,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.1146941489361702,
"acc_stderr,none": 0.0029051357814625332
},
"leaderboard_musr": {
"acc_norm,none": 0.39021164021164023,
"acc_norm_stderr,none": 0.017353346774513254,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.532,
"acc_norm_stderr,none": 0.031621252575725574
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.27734375,
"acc_norm_stderr,none": 0.02803528549328419
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.364,
"acc_norm_stderr,none": 0.030491555220405475
}
},
"leaderboard": {
"inst_level_strict_acc,none": 0.19304556354916066,
"inst_level_strict_acc_stderr,none": "N/A",
"acc_norm,none": 0.3108055519522636,
"acc_norm_stderr,none": 0.005029199223841951,
"prompt_level_strict_acc,none": 0.10536044362292052,
"prompt_level_strict_acc_stderr,none": 0.01321192086172503,
"acc,none": 0.1146941489361702,
"acc_stderr,none": 0.0029051357814625332,
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"prompt_level_loose_acc,none": 0.10536044362292052,
"prompt_level_loose_acc_stderr,none": 0.01321192086172503,
"inst_level_loose_acc,none": 0.19664268585131894,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.31088352716542267,
"acc_norm_stderr,none": 0.005760202058622169,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.47593582887700536,
"acc_norm_stderr,none": 0.03661929361528698
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.204,
"acc_norm_stderr,none": 0.025537121574548162
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.344,
"acc_norm_stderr,none": 0.03010450339231644
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.532,
"acc_norm_stderr,none": 0.031621252575725574
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.132,
"acc_norm_stderr,none": 0.021450980824038166
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.22,
"acc_norm_stderr,none": 0.026251792824605793
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.136,
"acc_norm_stderr,none": 0.021723342617052086
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.308,
"acc_norm_stderr,none": 0.02925692860650181
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.264,
"acc_norm_stderr,none": 0.027934518957690866
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.536,
"acc_norm_stderr,none": 0.031603975145223735
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.068,
"acc_norm_stderr,none": 0.015953748410747037
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.21232876712328766,
"acc_norm_stderr,none": 0.03396197282917473
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.16,
"acc_norm_stderr,none": 0.023232714782060626
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.26,
"acc_norm_stderr,none": 0.027797315752644335
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.16,
"acc_norm_stderr,none": 0.023232714782060626
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.4887640449438202,
"acc_norm_stderr,none": 0.03757281091983857
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.46,
"acc_norm_stderr,none": 0.031584653891499004
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.284,
"acc_norm_stderr,none": 0.02857695873043744
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.204,
"acc_norm_stderr,none": 0.025537121574548162
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.144,
"acc_norm_stderr,none": 0.022249407735450245
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.324,
"acc_norm_stderr,none": 0.029658294924545567
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.536,
"acc_norm_stderr,none": 0.031603975145223735
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2600671140939597,
"acc_norm_stderr,none": 0.01271729165731483,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.26262626262626265,
"acc_norm_stderr,none": 0.031353050095330834
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.27106227106227104,
"acc_norm_stderr,none": 0.01904063815660353
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.24553571428571427,
"acc_norm_stderr,none": 0.020357428454484603
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.10536044362292052,
"prompt_level_strict_acc_stderr,none": 0.013211920861725028,
"inst_level_strict_acc,none": 0.19304556354916066,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.10536044362292052,
"prompt_level_loose_acc_stderr,none": 0.013211920861725028,
"inst_level_loose_acc,none": 0.19664268585131894,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.1146941489361702,
"acc_stderr,none": 0.0029051357814625332
},
"leaderboard_musr": {
"acc_norm,none": 0.39021164021164023,
"acc_norm_stderr,none": 0.017353346774513254,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.532,
"acc_norm_stderr,none": 0.031621252575725574
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.27734375,
"acc_norm_stderr,none": 0.02803528549328419
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.364,
"acc_norm_stderr,none": 0.030491555220405475
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/LeroyDyer__SpydazWeb_AI_HumanAI_006-details | open-llm-leaderboard | "2024-11-19T23:31:47Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T23:28:40Z" | ---
pretty_name: Evaluation run of LeroyDyer/SpydazWeb_AI_HumanAI_006
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [LeroyDyer/SpydazWeb_AI_HumanAI_006](https://huggingface.co/LeroyDyer/SpydazWeb_AI_HumanAI_006)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/LeroyDyer__SpydazWeb_AI_HumanAI_006-details\"\
,\n\tname=\"LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-11-19T23-28-39.826446](https://huggingface.co/datasets/open-llm-leaderboard/LeroyDyer__SpydazWeb_AI_HumanAI_006-details/blob/main/LeroyDyer__SpydazWeb_AI_HumanAI_006/results_2024-11-19T23-28-39.826446.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"inst_level_strict_acc,none\": 0.19544364508393286,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"acc_norm,none\": 0.3223504994162667,\n\
\ \"acc_norm_stderr,none\": 0.005068996697015786,\n \"acc,none\"\
: 0.11353058510638298,\n \"acc_stderr,none\": 0.0028922608083426357,\n\
\ \"exact_match,none\": 0.0022658610271903325,\n \"exact_match_stderr,none\"\
: 0.0013080403363786098,\n \"prompt_level_strict_acc,none\": 0.09057301293900184,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.01235055421354707,\n \
\ \"prompt_level_loose_acc,none\": 0.09426987060998152,\n \"\
prompt_level_loose_acc_stderr,none\": 0.012574449557583826,\n \"inst_level_loose_acc,none\"\
: 0.20023980815347722,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\
,\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.32667939593820516,\n \"acc_norm_stderr,none\"\
: 0.005811286009703682,\n \"alias\": \" - leaderboard_bbh\"\n \
\ },\n \"leaderboard_bbh_boolean_expressions\": {\n \"alias\"\
: \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\": 0.712,\n\
\ \"acc_norm_stderr,none\": 0.028697004587398257\n },\n \
\ \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.5187165775401069,\n \"acc_norm_stderr,none\"\
: 0.03663608375537843\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_date_understanding\",\n \
\ \"acc_norm,none\": 0.204,\n \"acc_norm_stderr,none\": 0.025537121574548162\n\
\ },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\": 0.384,\n\
\ \"acc_norm_stderr,none\": 0.030821679117375447\n },\n \
\ \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.56,\n \"acc_norm_stderr,none\": 0.03145724452223569\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\"\
: \" - leaderboard_bbh_geometric_shapes\",\n \"acc_norm,none\": 0.212,\n\
\ \"acc_norm_stderr,none\": 0.025901884690541117\n },\n \
\ \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.484,\n \"acc_norm_stderr,none\":\
\ 0.03166998503010743\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
,\n \"acc_norm,none\": 0.212,\n \"acc_norm_stderr,none\":\
\ 0.025901884690541117\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.1,\n \"acc_norm_stderr,none\": 0.01901172751573434\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n\
\ \"acc_norm,none\": 0.352,\n \"acc_norm_stderr,none\": 0.030266288057359866\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"acc_norm,none\"\
: 0.34,\n \"acc_norm_stderr,none\": 0.030020073605457873\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.42,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\"\
: \" - leaderboard_bbh_object_counting\",\n \"acc_norm,none\": 0.24,\n\
\ \"acc_norm_stderr,none\": 0.027065293652238982\n },\n \
\ \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\
,\n \"acc_norm,none\": 0.2534246575342466,\n \"acc_norm_stderr,none\"\
: 0.03612245461624575\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.168,\n \"acc_norm_stderr,none\":\
\ 0.023692813205492536\n },\n \"leaderboard_bbh_ruin_names\": {\n\
\ \"alias\": \" - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\"\
: 0.18,\n \"acc_norm_stderr,none\": 0.02434689065029351\n },\n\
\ \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.12,\n \"acc_norm_stderr,none\": 0.020593600596839998\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" -\
\ leaderboard_bbh_snarks\",\n \"acc_norm,none\": 0.5561797752808989,\n\
\ \"acc_norm_stderr,none\": 0.03734431584194247\n },\n \
\ \"leaderboard_bbh_sports_understanding\": {\n \"alias\": \" - leaderboard_bbh_sports_understanding\"\
,\n \"acc_norm,none\": 0.492,\n \"acc_norm_stderr,none\":\
\ 0.03168215643141386\n },\n \"leaderboard_bbh_temporal_sequences\"\
: {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\",\n \
\ \"acc_norm,none\": 0.244,\n \"acc_norm_stderr,none\": 0.02721799546455311\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.204,\n \"acc_norm_stderr,none\":\
\ 0.025537121574548162\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.152,\n \"acc_norm_stderr,none\":\
\ 0.022752024491765464\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.328,\n \"acc_norm_stderr,none\":\
\ 0.029752391824475363\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2802013422818792,\n\
\ \"acc_norm_stderr,none\": 0.013022889022928454,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.2878787878787879,\n \"acc_norm_stderr,none\": 0.03225883512300998\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.2838827838827839,\n\
\ \"acc_norm_stderr,none\": 0.01931360450766325\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.27232142857142855,\n \"acc_norm_stderr,none\"\
: 0.02105508212932411\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.09057301293900184,\n \"prompt_level_strict_acc_stderr,none\": 0.012350554213547069,\n\
\ \"inst_level_strict_acc,none\": 0.19544364508393286,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.09426987060998152,\n \"prompt_level_loose_acc_stderr,none\": 0.012574449557583826,\n\
\ \"inst_level_loose_acc,none\": 0.20023980815347722,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.0022658610271903325,\n \"exact_match_stderr,none\"\
: 0.0013080403363786098,\n \"alias\": \" - leaderboard_math_hard\"\n\
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.003257328990228013,\n\
\ \"exact_match_stderr,none\": 0.003257328990228013\n },\n \
\ \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\": \"\
\ - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.008130081300813009,\n \"exact_match_stderr,none\": 0.008130081300813007\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\"\
: \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\": 0.0,\n\
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n\
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_prealgebra_hard\",\n \
\ \"exact_match,none\": 0.0051813471502590676,\n \"exact_match_stderr,none\"\
: 0.0051813471502590676\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_mmlu_pro\": {\n \"alias\": \" - leaderboard_mmlu_pro\"\
,\n \"acc,none\": 0.11353058510638298,\n \"acc_stderr,none\"\
: 0.0028922608083426357\n },\n \"leaderboard_musr\": {\n \
\ \"acc_norm,none\": 0.3558201058201058,\n \"acc_norm_stderr,none\"\
: 0.017000777451244772,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"alias\":\
\ \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\": 0.496,\n\
\ \"acc_norm_stderr,none\": 0.0316851985511992\n },\n \"\
leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.234375,\n \"acc_norm_stderr,none\"\
: 0.02652733398834892\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \
\ \"acc_norm,none\": 0.34,\n \"acc_norm_stderr,none\": 0.030020073605457873\n\
\ }\n },\n \"leaderboard\": {\n \"inst_level_strict_acc,none\"\
: 0.19544364508393286,\n \"inst_level_strict_acc_stderr,none\": \"N/A\",\n\
\ \"acc_norm,none\": 0.3223504994162667,\n \"acc_norm_stderr,none\"\
: 0.005068996697015786,\n \"acc,none\": 0.11353058510638298,\n \"\
acc_stderr,none\": 0.0028922608083426357,\n \"exact_match,none\": 0.0022658610271903325,\n\
\ \"exact_match_stderr,none\": 0.0013080403363786098,\n \"prompt_level_strict_acc,none\"\
: 0.09057301293900184,\n \"prompt_level_strict_acc_stderr,none\": 0.01235055421354707,\n\
\ \"prompt_level_loose_acc,none\": 0.09426987060998152,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.012574449557583826,\n \"inst_level_loose_acc,none\": 0.20023980815347722,\n\
\ \"inst_level_loose_acc_stderr,none\": \"N/A\",\n \"alias\": \"leaderboard\"\
\n },\n \"leaderboard_bbh\": {\n \"acc_norm,none\": 0.32667939593820516,\n\
\ \"acc_norm_stderr,none\": 0.005811286009703682,\n \"alias\": \"\
\ - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\": {\n\
\ \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\"\
: 0.712,\n \"acc_norm_stderr,none\": 0.028697004587398257\n },\n \"\
leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.5187165775401069,\n \"acc_norm_stderr,none\"\
: 0.03663608375537843\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.204,\n \"acc_norm_stderr,none\": 0.025537121574548162\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.384,\n \"acc_norm_stderr,none\": 0.030821679117375447\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.56,\n \"acc_norm_stderr,none\": 0.03145724452223569\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.212,\n \"acc_norm_stderr,none\": 0.025901884690541117\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.484,\n \"acc_norm_stderr,none\": 0.03166998503010743\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.212,\n \"acc_norm_stderr,none\": 0.025901884690541117\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.1,\n \"acc_norm_stderr,none\": 0.01901172751573434\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.352,\n \"acc_norm_stderr,none\": 0.030266288057359866\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.34,\n \"acc_norm_stderr,none\": 0.030020073605457873\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.42,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.24,\n \"acc_norm_stderr,none\": 0.027065293652238982\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.2534246575342466,\n\
\ \"acc_norm_stderr,none\": 0.03612245461624575\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.168,\n \"acc_norm_stderr,none\": 0.023692813205492536\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.18,\n \"acc_norm_stderr,none\": 0.02434689065029351\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.12,\n \"acc_norm_stderr,none\": 0.020593600596839998\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.5561797752808989,\n \"acc_norm_stderr,none\"\
: 0.03734431584194247\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.492,\n \"acc_norm_stderr,none\": 0.03168215643141386\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.244,\n \"acc_norm_stderr,none\": 0.02721799546455311\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.204,\n \"acc_norm_stderr,none\": 0.025537121574548162\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.152,\n \"acc_norm_stderr,none\": 0.022752024491765464\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.328,\n \"acc_norm_stderr,none\": 0.029752391824475363\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2802013422818792,\n\
\ \"acc_norm_stderr,none\": 0.013022889022928454,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.2878787878787879,\n\
\ \"acc_norm_stderr,none\": 0.03225883512300998\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.2838827838827839,\n \"acc_norm_stderr,none\": 0.01931360450766325\n \
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.27232142857142855,\n \"acc_norm_stderr,none\"\
: 0.02105508212932411\n },\n \"leaderboard_ifeval\": {\n \"alias\"\
: \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.09057301293900184,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.012350554213547069,\n \
\ \"inst_level_strict_acc,none\": 0.19544364508393286,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.09426987060998152,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.012574449557583826,\n \"inst_level_loose_acc,none\"\
: 0.20023980815347722,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n\
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.0022658610271903325,\n\
\ \"exact_match_stderr,none\": 0.0013080403363786098,\n \"alias\"\
: \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\": {\n\
\ \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.003257328990228013,\n \"exact_match_stderr,none\": 0.003257328990228013\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.008130081300813009,\n \"exact_match_stderr,none\": 0.008130081300813007\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\": \" - leaderboard_math_geometry_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_intermediate_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_intermediate_algebra_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_num_theory_hard\"\
: {\n \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_prealgebra_hard\",\n \"exact_match,none\"\
: 0.0051813471502590676,\n \"exact_match_stderr,none\": 0.0051813471502590676\n\
\ },\n \"leaderboard_math_precalculus_hard\": {\n \"alias\": \" -\
\ leaderboard_math_precalculus_hard\",\n \"exact_match,none\": 0.0,\n \
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_mmlu_pro\": {\n\
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.11353058510638298,\n\
\ \"acc_stderr,none\": 0.0028922608083426357\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.3558201058201058,\n \"acc_norm_stderr,none\"\
: 0.017000777451244772,\n \"alias\": \" - leaderboard_musr\"\n },\n \
\ \"leaderboard_musr_murder_mysteries\": {\n \"alias\": \" - leaderboard_musr_murder_mysteries\"\
,\n \"acc_norm,none\": 0.496,\n \"acc_norm_stderr,none\": 0.0316851985511992\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\": \" -\
\ leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.234375,\n\
\ \"acc_norm_stderr,none\": 0.02652733398834892\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \"acc_norm,none\"\
: 0.34,\n \"acc_norm_stderr,none\": 0.030020073605457873\n }\n}\n```"
repo_url: https://huggingface.co/LeroyDyer/SpydazWeb_AI_HumanAI_006
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_navigate
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_snarks
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_gpqa_extended
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_gpqa_main
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_gpqa_main_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_ifeval
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_ifeval_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_mmlu_pro
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_musr_object_placements
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-19T23-28-39.826446.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_19T23_28_39.826446
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-19T23-28-39.826446.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-19T23-28-39.826446.jsonl'
---
# Dataset Card for Evaluation run of LeroyDyer/SpydazWeb_AI_HumanAI_006
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [LeroyDyer/SpydazWeb_AI_HumanAI_006](https://huggingface.co/LeroyDyer/SpydazWeb_AI_HumanAI_006)
The dataset is composed of 38 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/LeroyDyer__SpydazWeb_AI_HumanAI_006-details",
name="LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
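You can also pin a specific run instead of relying on the `latest` alias. A minimal sketch, assuming the timestamped split name listed in the configs above:
```python
from datasets import load_dataset

# Load the boolean_expressions samples from the 2024-11-19 run explicitly,
# using the split named after the run timestamp (see the configs section).
data = load_dataset(
	"open-llm-leaderboard/LeroyDyer__SpydazWeb_AI_HumanAI_006-details",
	name="LeroyDyer__SpydazWeb_AI_HumanAI_006__leaderboard_bbh_boolean_expressions",
	split="2024_11_19T23_28_39.826446"
)
```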
## Latest results
These are the [latest results from run 2024-11-19T23-28-39.826446](https://huggingface.co/datasets/open-llm-leaderboard/LeroyDyer__SpydazWeb_AI_HumanAI_006-details/blob/main/LeroyDyer__SpydazWeb_AI_HumanAI_006/results_2024-11-19T23-28-39.826446.json) (note that the repo may contain results for other tasks if successive evals didn't cover the same tasks; each can be found in the results and in the "latest" split for its eval):
```python
{
"all": {
"leaderboard": {
"inst_level_strict_acc,none": 0.19544364508393286,
"inst_level_strict_acc_stderr,none": "N/A",
"acc_norm,none": 0.3223504994162667,
"acc_norm_stderr,none": 0.005068996697015786,
"acc,none": 0.11353058510638298,
"acc_stderr,none": 0.0028922608083426357,
"exact_match,none": 0.0022658610271903325,
"exact_match_stderr,none": 0.0013080403363786098,
"prompt_level_strict_acc,none": 0.09057301293900184,
"prompt_level_strict_acc_stderr,none": 0.01235055421354707,
"prompt_level_loose_acc,none": 0.09426987060998152,
"prompt_level_loose_acc_stderr,none": 0.012574449557583826,
"inst_level_loose_acc,none": 0.20023980815347722,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.32667939593820516,
"acc_norm_stderr,none": 0.005811286009703682,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.712,
"acc_norm_stderr,none": 0.028697004587398257
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5187165775401069,
"acc_norm_stderr,none": 0.03663608375537843
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.204,
"acc_norm_stderr,none": 0.025537121574548162
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.384,
"acc_norm_stderr,none": 0.030821679117375447
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.56,
"acc_norm_stderr,none": 0.03145724452223569
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.212,
"acc_norm_stderr,none": 0.025901884690541117
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.484,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.212,
"acc_norm_stderr,none": 0.025901884690541117
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.1,
"acc_norm_stderr,none": 0.01901172751573434
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.352,
"acc_norm_stderr,none": 0.030266288057359866
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.34,
"acc_norm_stderr,none": 0.030020073605457873
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.24,
"acc_norm_stderr,none": 0.027065293652238982
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.2534246575342466,
"acc_norm_stderr,none": 0.03612245461624575
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.168,
"acc_norm_stderr,none": 0.023692813205492536
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.18,
"acc_norm_stderr,none": 0.02434689065029351
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.12,
"acc_norm_stderr,none": 0.020593600596839998
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.5561797752808989,
"acc_norm_stderr,none": 0.03734431584194247
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.492,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.244,
"acc_norm_stderr,none": 0.02721799546455311
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.204,
"acc_norm_stderr,none": 0.025537121574548162
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.152,
"acc_norm_stderr,none": 0.022752024491765464
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.328,
"acc_norm_stderr,none": 0.029752391824475363
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2802013422818792,
"acc_norm_stderr,none": 0.013022889022928454,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2878787878787879,
"acc_norm_stderr,none": 0.03225883512300998
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.2838827838827839,
"acc_norm_stderr,none": 0.01931360450766325
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.27232142857142855,
"acc_norm_stderr,none": 0.02105508212932411
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.09057301293900184,
"prompt_level_strict_acc_stderr,none": 0.012350554213547069,
"inst_level_strict_acc,none": 0.19544364508393286,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.09426987060998152,
"prompt_level_loose_acc_stderr,none": 0.012574449557583826,
"inst_level_loose_acc,none": 0.20023980815347722,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0022658610271903325,
"exact_match_stderr,none": 0.0013080403363786098,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.003257328990228013,
"exact_match_stderr,none": 0.003257328990228013
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.008130081300813009,
"exact_match_stderr,none": 0.008130081300813007
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0051813471502590676,
"exact_match_stderr,none": 0.0051813471502590676
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.11353058510638298,
"acc_stderr,none": 0.0028922608083426357
},
"leaderboard_musr": {
"acc_norm,none": 0.3558201058201058,
"acc_norm_stderr,none": 0.017000777451244772,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.496,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.234375,
"acc_norm_stderr,none": 0.02652733398834892
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.34,
"acc_norm_stderr,none": 0.030020073605457873
}
},
"leaderboard": {
"inst_level_strict_acc,none": 0.19544364508393286,
"inst_level_strict_acc_stderr,none": "N/A",
"acc_norm,none": 0.3223504994162667,
"acc_norm_stderr,none": 0.005068996697015786,
"acc,none": 0.11353058510638298,
"acc_stderr,none": 0.0028922608083426357,
"exact_match,none": 0.0022658610271903325,
"exact_match_stderr,none": 0.0013080403363786098,
"prompt_level_strict_acc,none": 0.09057301293900184,
"prompt_level_strict_acc_stderr,none": 0.01235055421354707,
"prompt_level_loose_acc,none": 0.09426987060998152,
"prompt_level_loose_acc_stderr,none": 0.012574449557583826,
"inst_level_loose_acc,none": 0.20023980815347722,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.32667939593820516,
"acc_norm_stderr,none": 0.005811286009703682,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.712,
"acc_norm_stderr,none": 0.028697004587398257
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5187165775401069,
"acc_norm_stderr,none": 0.03663608375537843
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.204,
"acc_norm_stderr,none": 0.025537121574548162
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.384,
"acc_norm_stderr,none": 0.030821679117375447
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.56,
"acc_norm_stderr,none": 0.03145724452223569
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.212,
"acc_norm_stderr,none": 0.025901884690541117
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.484,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.212,
"acc_norm_stderr,none": 0.025901884690541117
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.1,
"acc_norm_stderr,none": 0.01901172751573434
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.352,
"acc_norm_stderr,none": 0.030266288057359866
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.34,
"acc_norm_stderr,none": 0.030020073605457873
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.24,
"acc_norm_stderr,none": 0.027065293652238982
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.2534246575342466,
"acc_norm_stderr,none": 0.03612245461624575
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.168,
"acc_norm_stderr,none": 0.023692813205492536
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.18,
"acc_norm_stderr,none": 0.02434689065029351
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.12,
"acc_norm_stderr,none": 0.020593600596839998
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.5561797752808989,
"acc_norm_stderr,none": 0.03734431584194247
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.492,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.244,
"acc_norm_stderr,none": 0.02721799546455311
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.204,
"acc_norm_stderr,none": 0.025537121574548162
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.152,
"acc_norm_stderr,none": 0.022752024491765464
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.328,
"acc_norm_stderr,none": 0.029752391824475363
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2802013422818792,
"acc_norm_stderr,none": 0.013022889022928454,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2878787878787879,
"acc_norm_stderr,none": 0.03225883512300998
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.2838827838827839,
"acc_norm_stderr,none": 0.01931360450766325
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.27232142857142855,
"acc_norm_stderr,none": 0.02105508212932411
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.09057301293900184,
"prompt_level_strict_acc_stderr,none": 0.012350554213547069,
"inst_level_strict_acc,none": 0.19544364508393286,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.09426987060998152,
"prompt_level_loose_acc_stderr,none": 0.012574449557583826,
"inst_level_loose_acc,none": 0.20023980815347722,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0022658610271903325,
"exact_match_stderr,none": 0.0013080403363786098,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.003257328990228013,
"exact_match_stderr,none": 0.003257328990228013
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.008130081300813009,
"exact_match_stderr,none": 0.008130081300813007
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0051813471502590676,
"exact_match_stderr,none": 0.0051813471502590676
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.11353058510638298,
"acc_stderr,none": 0.0028922608083426357
},
"leaderboard_musr": {
"acc_norm,none": 0.3558201058201058,
"acc_norm_stderr,none": 0.017000777451244772,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.496,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.234375,
"acc_norm_stderr,none": 0.02652733398834892
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.34,
"acc_norm_stderr,none": 0.030020073605457873
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases, and limitations of the dataset. More information is needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/LeroyDyer__SpydazWeb_AI_HumanAI_007-details | open-llm-leaderboard | "2024-11-19T23:31:46Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T23:28:40Z" | ---
pretty_name: Evaluation run of LeroyDyer/SpydazWeb_AI_HumanAI_007
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [LeroyDyer/SpydazWeb_AI_HumanAI_007](https://huggingface.co/LeroyDyer/SpydazWeb_AI_HumanAI_007)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/LeroyDyer__SpydazWeb_AI_HumanAI_007-details\"\
,\n\tname=\"LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-11-19T23-28-39.555176](https://huggingface.co/datasets/open-llm-leaderboard/LeroyDyer__SpydazWeb_AI_HumanAI_007-details/blob/main/LeroyDyer__SpydazWeb_AI_HumanAI_007/results_2024-11-19T23-28-39.555176.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"inst_level_strict_acc,none\": 0.40047961630695444,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"inst_level_loose_acc,none\"\
: 0.44004796163069543,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\
,\n \"prompt_level_strict_acc,none\": 0.2698706099815157,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.019102087526494387,\n \
\ \"prompt_level_loose_acc,none\": 0.31608133086876156,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.020008050377239083,\n \"acc_norm,none\": 0.3384355947593722,\n \
\ \"acc_norm_stderr,none\": 0.005080787604160283,\n \"exact_match,none\"\
: 0.015105740181268883,\n \"exact_match_stderr,none\": 0.0033496871269877494,\n\
\ \"acc,none\": 0.13522273936170212,\n \"acc_stderr,none\"\
: 0.0031176412001567644,\n \"alias\": \"leaderboard\"\n },\n \
\ \"leaderboard_bbh\": {\n \"acc_norm,none\": 0.33952438812706126,\n\
\ \"acc_norm_stderr,none\": 0.00578708020278095,\n \"alias\"\
: \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \
\ \"acc_norm,none\": 0.712,\n \"acc_norm_stderr,none\": 0.028697004587398257\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\"\
: \" - leaderboard_bbh_causal_judgement\",\n \"acc_norm,none\": 0.5187165775401069,\n\
\ \"acc_norm_stderr,none\": 0.03663608375537843\n },\n \
\ \"leaderboard_bbh_date_understanding\": {\n \"alias\": \" - leaderboard_bbh_date_understanding\"\
,\n \"acc_norm,none\": 0.264,\n \"acc_norm_stderr,none\":\
\ 0.027934518957690866\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\",\n \
\ \"acc_norm,none\": 0.608,\n \"acc_norm_stderr,none\": 0.030938207620401222\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\"\
: \" - leaderboard_bbh_formal_fallacies\",\n \"acc_norm,none\": 0.532,\n\
\ \"acc_norm_stderr,none\": 0.031621252575725574\n },\n \
\ \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.076,\n \"acc_norm_stderr,none\":\
\ 0.01679357306785969\n },\n \"leaderboard_bbh_hyperbaton\": {\n \
\ \"alias\": \" - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\"\
: 0.548,\n \"acc_norm_stderr,none\": 0.03153986449255664\n },\n\
\ \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.188,\n \"acc_norm_stderr,none\": 0.024760377727750513\n },\n\
\ \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\",\n \"\
acc_norm,none\": 0.136,\n \"acc_norm_stderr,none\": 0.021723342617052086\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n\
\ \"acc_norm,none\": 0.368,\n \"acc_norm_stderr,none\": 0.03056207062099311\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"acc_norm,none\"\
: 0.42,\n \"acc_norm_stderr,none\": 0.03127799950463661\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.42,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\"\
: \" - leaderboard_bbh_object_counting\",\n \"acc_norm,none\": 0.288,\n\
\ \"acc_norm_stderr,none\": 0.028697004587398253\n },\n \
\ \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\
,\n \"acc_norm,none\": 0.22602739726027396,\n \"acc_norm_stderr,none\"\
: 0.034734362688347356\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.164,\n \"acc_norm_stderr,none\":\
\ 0.02346526100207671\n },\n \"leaderboard_bbh_ruin_names\": {\n \
\ \"alias\": \" - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\"\
: 0.18,\n \"acc_norm_stderr,none\": 0.02434689065029351\n },\n\
\ \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.112,\n \"acc_norm_stderr,none\": 0.019985536939171485\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" -\
\ leaderboard_bbh_snarks\",\n \"acc_norm,none\": 0.5168539325842697,\n\
\ \"acc_norm_stderr,none\": 0.037560944447344834\n },\n \
\ \"leaderboard_bbh_sports_understanding\": {\n \"alias\": \" - leaderboard_bbh_sports_understanding\"\
,\n \"acc_norm,none\": 0.532,\n \"acc_norm_stderr,none\":\
\ 0.031621252575725574\n },\n \"leaderboard_bbh_temporal_sequences\"\
: {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\",\n \
\ \"acc_norm,none\": 0.24,\n \"acc_norm_stderr,none\": 0.027065293652238982\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.172,\n \"acc_norm_stderr,none\":\
\ 0.02391551394448624\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.156,\n \"acc_norm_stderr,none\":\
\ 0.022995023034068682\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.332,\n \"acc_norm_stderr,none\":\
\ 0.029844039047465857\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.28859060402684567,\n\
\ \"acc_norm_stderr,none\": 0.013138520572196355,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.2727272727272727,\n \"acc_norm_stderr,none\": 0.03173071239071728\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.2948717948717949,\n\
\ \"acc_norm_stderr,none\": 0.01953225605335253\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.28794642857142855,\n \"acc_norm_stderr,none\"\
: 0.021416989369571725\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.2698706099815157,\n \"prompt_level_strict_acc_stderr,none\": 0.019102087526494387,\n\
\ \"inst_level_strict_acc,none\": 0.40047961630695444,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.31608133086876156,\n \"prompt_level_loose_acc_stderr,none\": 0.020008050377239083,\n\
\ \"inst_level_loose_acc,none\": 0.44004796163069543,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.015105740181268883,\n \"exact_match_stderr,none\"\
: 0.0033496871269877494,\n \"alias\": \" - leaderboard_math_hard\"\n\
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.013029315960912053,\n\
\ \"exact_match_stderr,none\": 0.006482644725390246\n },\n \
\ \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\": \"\
\ - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.016260162601626018,\n \"exact_match_stderr,none\": 0.011450452676925654\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\"\
: \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\": 0.022727272727272728,\n\
\ \"exact_match_stderr,none\": 0.0130210469090637\n },\n \
\ \"leaderboard_math_intermediate_algebra_hard\": {\n \"alias\": \"\
\ - leaderboard_math_intermediate_algebra_hard\",\n \"exact_match,none\"\
: 0.007142857142857143,\n \"exact_match_stderr,none\": 0.005041703051390571\n\
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\"\
: \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\": 0.006493506493506494,\n\
\ \"exact_match_stderr,none\": 0.006493506493506494\n },\n \
\ \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.03626943005181347,\n \"exact_match_stderr,none\"\
: 0.013492659751295115\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.007407407407407408,\n \"exact_match_stderr,none\"\
: 0.007407407407407408\n },\n \"leaderboard_mmlu_pro\": {\n \
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.13522273936170212,\n\
\ \"acc_stderr,none\": 0.003117641200156765\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.4087301587301587,\n \"acc_norm_stderr,none\"\
: 0.017613620896423727,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"alias\":\
\ \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\": 0.516,\n\
\ \"acc_norm_stderr,none\": 0.03166998503010743\n },\n \
\ \"leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.296875,\n \"acc_norm_stderr,none\"\
: 0.028610997088737832\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \
\ \"acc_norm,none\": 0.416,\n \"acc_norm_stderr,none\": 0.031235856237014505\n\
\ }\n },\n \"leaderboard\": {\n \"inst_level_strict_acc,none\"\
: 0.40047961630695444,\n \"inst_level_strict_acc_stderr,none\": \"N/A\",\n\
\ \"inst_level_loose_acc,none\": 0.44004796163069543,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_strict_acc,none\": 0.2698706099815157,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.019102087526494387,\n \"\
prompt_level_loose_acc,none\": 0.31608133086876156,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.020008050377239083,\n \"acc_norm,none\": 0.3384355947593722,\n \
\ \"acc_norm_stderr,none\": 0.005080787604160283,\n \"exact_match,none\"\
: 0.015105740181268883,\n \"exact_match_stderr,none\": 0.0033496871269877494,\n\
\ \"acc,none\": 0.13522273936170212,\n \"acc_stderr,none\": 0.0031176412001567644,\n\
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \
\ \"acc_norm,none\": 0.33952438812706126,\n \"acc_norm_stderr,none\": 0.00578708020278095,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \"\
acc_norm,none\": 0.712,\n \"acc_norm_stderr,none\": 0.028697004587398257\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.5187165775401069,\n \"acc_norm_stderr,none\"\
: 0.03663608375537843\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.264,\n \"acc_norm_stderr,none\": 0.027934518957690866\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.608,\n \"acc_norm_stderr,none\": 0.030938207620401222\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.532,\n \"acc_norm_stderr,none\": 0.031621252575725574\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.076,\n \"acc_norm_stderr,none\": 0.01679357306785969\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.548,\n \"acc_norm_stderr,none\": 0.03153986449255664\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.188,\n \"acc_norm_stderr,none\": 0.024760377727750513\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.136,\n \"acc_norm_stderr,none\": 0.021723342617052086\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.368,\n \"acc_norm_stderr,none\": 0.03056207062099311\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.42,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.42,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.288,\n \"acc_norm_stderr,none\": 0.028697004587398253\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.22602739726027396,\n\
\ \"acc_norm_stderr,none\": 0.034734362688347356\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.164,\n \"acc_norm_stderr,none\": 0.02346526100207671\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.18,\n \"acc_norm_stderr,none\": 0.02434689065029351\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.112,\n \"acc_norm_stderr,none\": 0.019985536939171485\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.5168539325842697,\n \"acc_norm_stderr,none\"\
: 0.037560944447344834\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.532,\n \"acc_norm_stderr,none\": 0.031621252575725574\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.24,\n \"acc_norm_stderr,none\": 0.027065293652238982\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.172,\n \"acc_norm_stderr,none\": 0.02391551394448624\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.156,\n \"acc_norm_stderr,none\": 0.022995023034068682\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.332,\n \"acc_norm_stderr,none\": 0.029844039047465857\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.28859060402684567,\n\
\ \"acc_norm_stderr,none\": 0.013138520572196355,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.2727272727272727,\n\
\ \"acc_norm_stderr,none\": 0.03173071239071728\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.2948717948717949,\n \"acc_norm_stderr,none\": 0.01953225605335253\n \
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.28794642857142855,\n \"acc_norm_stderr,none\"\
: 0.021416989369571725\n },\n \"leaderboard_ifeval\": {\n \"alias\"\
: \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.2698706099815157,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.019102087526494387,\n \
\ \"inst_level_strict_acc,none\": 0.40047961630695444,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.31608133086876156,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.020008050377239083,\n \"inst_level_loose_acc,none\"\
: 0.44004796163069543,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n\
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.015105740181268883,\n\
\ \"exact_match_stderr,none\": 0.0033496871269877494,\n \"alias\"\
: \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\": {\n\
\ \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.013029315960912053,\n \"exact_match_stderr,none\": 0.006482644725390246\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.016260162601626018,\n \"exact_match_stderr,none\": 0.011450452676925654\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\": \" - leaderboard_math_geometry_hard\"\
,\n \"exact_match,none\": 0.022727272727272728,\n \"exact_match_stderr,none\"\
: 0.0130210469090637\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.007142857142857143,\n \"exact_match_stderr,none\"\
: 0.005041703051390571\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.006493506493506494,\n \"exact_match_stderr,none\": 0.006493506493506494\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.03626943005181347,\n \"exact_match_stderr,none\"\
: 0.013492659751295115\n },\n \"leaderboard_math_precalculus_hard\": {\n \
\ \"alias\": \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\"\
: 0.007407407407407408,\n \"exact_match_stderr,none\": 0.007407407407407408\n\
\ },\n \"leaderboard_mmlu_pro\": {\n \"alias\": \" - leaderboard_mmlu_pro\"\
,\n \"acc,none\": 0.13522273936170212,\n \"acc_stderr,none\": 0.003117641200156765\n\
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.4087301587301587,\n\
\ \"acc_norm_stderr,none\": 0.017613620896423727,\n \"alias\": \"\
\ - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\": {\n \
\ \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\"\
: 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n },\n \"\
leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.296875,\n \"acc_norm_stderr,none\": 0.028610997088737832\n\
\ },\n \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.416,\n \"acc_norm_stderr,none\": 0.031235856237014505\n\
\ }\n}\n```"
repo_url: https://huggingface.co/LeroyDyer/SpydazWeb_AI_HumanAI_007
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_navigate
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_snarks
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_gpqa_extended
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_gpqa_main
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_gpqa_main_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_ifeval
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_ifeval_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_mmlu_pro
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_musr_object_placements
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-19T23-28-39.555176.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_19T23_28_39.555176
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-19T23-28-39.555176.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-19T23-28-39.555176.jsonl'
---
# Dataset Card for Evaluation run of LeroyDyer/SpydazWeb_AI_HumanAI_007
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [LeroyDyer/SpydazWeb_AI_HumanAI_007](https://huggingface.co/LeroyDyer/SpydazWeb_AI_HumanAI_007)
The dataset is composed of 38 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/LeroyDyer__SpydazWeb_AI_HumanAI_007-details",
name="LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
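If you are unsure which configuration or split names are available, the `datasets` library can list them before you load anything. The snippet below is a minimal sketch: the repository id and the example task configuration are taken from this card, and the aggregated "results" configuration mentioned above should show up in the listed configuration names.

```python
from datasets import get_dataset_config_names, get_dataset_split_names, load_dataset

repo_id = "open-llm-leaderboard/LeroyDyer__SpydazWeb_AI_HumanAI_007-details"

# List every per-task configuration (the aggregated "results" configuration is included here).
configs = get_dataset_config_names(repo_id)
print(configs)

# Inspect the splits of one configuration: one timestamped split per run, plus "latest".
task_config = "LeroyDyer__SpydazWeb_AI_HumanAI_007__leaderboard_bbh_boolean_expressions"
print(get_dataset_split_names(repo_id, task_config))

# Load the latest samples for that task and look at the first record.
data = load_dataset(repo_id, name=task_config, split="latest")
print(data[0])
```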
## Latest results
These are the [latest results from run 2024-11-19T23-28-39.555176](https://huggingface.co/datasets/open-llm-leaderboard/LeroyDyer__SpydazWeb_AI_HumanAI_007-details/blob/main/LeroyDyer__SpydazWeb_AI_HumanAI_007/results_2024-11-19T23-28-39.555176.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each task in the results and in the "latest" split for each eval):
```python
{
"all": {
"leaderboard": {
"inst_level_strict_acc,none": 0.40047961630695444,
"inst_level_strict_acc_stderr,none": "N/A",
"inst_level_loose_acc,none": 0.44004796163069543,
"inst_level_loose_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.2698706099815157,
"prompt_level_strict_acc_stderr,none": 0.019102087526494387,
"prompt_level_loose_acc,none": 0.31608133086876156,
"prompt_level_loose_acc_stderr,none": 0.020008050377239083,
"acc_norm,none": 0.3384355947593722,
"acc_norm_stderr,none": 0.005080787604160283,
"exact_match,none": 0.015105740181268883,
"exact_match_stderr,none": 0.0033496871269877494,
"acc,none": 0.13522273936170212,
"acc_stderr,none": 0.0031176412001567644,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.33952438812706126,
"acc_norm_stderr,none": 0.00578708020278095,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.712,
"acc_norm_stderr,none": 0.028697004587398257
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5187165775401069,
"acc_norm_stderr,none": 0.03663608375537843
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.264,
"acc_norm_stderr,none": 0.027934518957690866
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.608,
"acc_norm_stderr,none": 0.030938207620401222
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.532,
"acc_norm_stderr,none": 0.031621252575725574
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.076,
"acc_norm_stderr,none": 0.01679357306785969
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.548,
"acc_norm_stderr,none": 0.03153986449255664
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.188,
"acc_norm_stderr,none": 0.024760377727750513
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.136,
"acc_norm_stderr,none": 0.021723342617052086
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.368,
"acc_norm_stderr,none": 0.03056207062099311
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.288,
"acc_norm_stderr,none": 0.028697004587398253
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.22602739726027396,
"acc_norm_stderr,none": 0.034734362688347356
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.164,
"acc_norm_stderr,none": 0.02346526100207671
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.18,
"acc_norm_stderr,none": 0.02434689065029351
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.112,
"acc_norm_stderr,none": 0.019985536939171485
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.5168539325842697,
"acc_norm_stderr,none": 0.037560944447344834
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.532,
"acc_norm_stderr,none": 0.031621252575725574
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.24,
"acc_norm_stderr,none": 0.027065293652238982
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.172,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.156,
"acc_norm_stderr,none": 0.022995023034068682
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.332,
"acc_norm_stderr,none": 0.029844039047465857
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.28859060402684567,
"acc_norm_stderr,none": 0.013138520572196355,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2727272727272727,
"acc_norm_stderr,none": 0.03173071239071728
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.2948717948717949,
"acc_norm_stderr,none": 0.01953225605335253
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.28794642857142855,
"acc_norm_stderr,none": 0.021416989369571725
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.2698706099815157,
"prompt_level_strict_acc_stderr,none": 0.019102087526494387,
"inst_level_strict_acc,none": 0.40047961630695444,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.31608133086876156,
"prompt_level_loose_acc_stderr,none": 0.020008050377239083,
"inst_level_loose_acc,none": 0.44004796163069543,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.015105740181268883,
"exact_match_stderr,none": 0.0033496871269877494,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.013029315960912053,
"exact_match_stderr,none": 0.006482644725390246
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.016260162601626018,
"exact_match_stderr,none": 0.011450452676925654
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.022727272727272728,
"exact_match_stderr,none": 0.0130210469090637
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.007142857142857143,
"exact_match_stderr,none": 0.005041703051390571
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.006493506493506494,
"exact_match_stderr,none": 0.006493506493506494
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.03626943005181347,
"exact_match_stderr,none": 0.013492659751295115
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.007407407407407408,
"exact_match_stderr,none": 0.007407407407407408
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.13522273936170212,
"acc_stderr,none": 0.003117641200156765
},
"leaderboard_musr": {
"acc_norm,none": 0.4087301587301587,
"acc_norm_stderr,none": 0.017613620896423727,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.296875,
"acc_norm_stderr,none": 0.028610997088737832
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.416,
"acc_norm_stderr,none": 0.031235856237014505
}
},
"leaderboard": {
"inst_level_strict_acc,none": 0.40047961630695444,
"inst_level_strict_acc_stderr,none": "N/A",
"inst_level_loose_acc,none": 0.44004796163069543,
"inst_level_loose_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.2698706099815157,
"prompt_level_strict_acc_stderr,none": 0.019102087526494387,
"prompt_level_loose_acc,none": 0.31608133086876156,
"prompt_level_loose_acc_stderr,none": 0.020008050377239083,
"acc_norm,none": 0.3384355947593722,
"acc_norm_stderr,none": 0.005080787604160283,
"exact_match,none": 0.015105740181268883,
"exact_match_stderr,none": 0.0033496871269877494,
"acc,none": 0.13522273936170212,
"acc_stderr,none": 0.0031176412001567644,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.33952438812706126,
"acc_norm_stderr,none": 0.00578708020278095,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.712,
"acc_norm_stderr,none": 0.028697004587398257
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5187165775401069,
"acc_norm_stderr,none": 0.03663608375537843
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.264,
"acc_norm_stderr,none": 0.027934518957690866
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.608,
"acc_norm_stderr,none": 0.030938207620401222
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.532,
"acc_norm_stderr,none": 0.031621252575725574
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.076,
"acc_norm_stderr,none": 0.01679357306785969
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.548,
"acc_norm_stderr,none": 0.03153986449255664
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.188,
"acc_norm_stderr,none": 0.024760377727750513
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.136,
"acc_norm_stderr,none": 0.021723342617052086
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.368,
"acc_norm_stderr,none": 0.03056207062099311
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.288,
"acc_norm_stderr,none": 0.028697004587398253
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.22602739726027396,
"acc_norm_stderr,none": 0.034734362688347356
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.164,
"acc_norm_stderr,none": 0.02346526100207671
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.18,
"acc_norm_stderr,none": 0.02434689065029351
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.112,
"acc_norm_stderr,none": 0.019985536939171485
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.5168539325842697,
"acc_norm_stderr,none": 0.037560944447344834
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.532,
"acc_norm_stderr,none": 0.031621252575725574
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.24,
"acc_norm_stderr,none": 0.027065293652238982
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.172,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.156,
"acc_norm_stderr,none": 0.022995023034068682
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.332,
"acc_norm_stderr,none": 0.029844039047465857
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.28859060402684567,
"acc_norm_stderr,none": 0.013138520572196355,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2727272727272727,
"acc_norm_stderr,none": 0.03173071239071728
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.2948717948717949,
"acc_norm_stderr,none": 0.01953225605335253
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.28794642857142855,
"acc_norm_stderr,none": 0.021416989369571725
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.2698706099815157,
"prompt_level_strict_acc_stderr,none": 0.019102087526494387,
"inst_level_strict_acc,none": 0.40047961630695444,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.31608133086876156,
"prompt_level_loose_acc_stderr,none": 0.020008050377239083,
"inst_level_loose_acc,none": 0.44004796163069543,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.015105740181268883,
"exact_match_stderr,none": 0.0033496871269877494,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.013029315960912053,
"exact_match_stderr,none": 0.006482644725390246
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.016260162601626018,
"exact_match_stderr,none": 0.011450452676925654
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.022727272727272728,
"exact_match_stderr,none": 0.0130210469090637
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.007142857142857143,
"exact_match_stderr,none": 0.005041703051390571
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.006493506493506494,
"exact_match_stderr,none": 0.006493506493506494
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.03626943005181347,
"exact_match_stderr,none": 0.013492659751295115
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.007407407407407408,
"exact_match_stderr,none": 0.007407407407407408
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.13522273936170212,
"acc_stderr,none": 0.003117641200156765
},
"leaderboard_musr": {
"acc_norm,none": 0.4087301587301587,
"acc_norm_stderr,none": 0.017613620896423727,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.296875,
"acc_norm_stderr,none": 0.028610997088737832
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.416,
"acc_norm_stderr,none": 0.031235856237014505
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases, and limitations of the dataset. More information is needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/win10__Norns-Qwen2.5-7B-details | open-llm-leaderboard | "2024-11-19T23:39:34Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T23:36:11Z" | ---
pretty_name: Evaluation run of win10/Norns-Qwen2.5-7B
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [win10/Norns-Qwen2.5-7B](https://huggingface.co/win10/Norns-Qwen2.5-7B)\nThe dataset\
\ is composed of 38 configuration(s), each one corresponding to one of the evaluated\
\ task.\n\nThe dataset has been created from 1 run(s). Each run can be found as\
\ a specific split in each configuration, the split being named using the timestamp\
\ of the run.The \"train\" split is always pointing to the latest results.\n\nAn\
\ additional configuration \"results\" store all the aggregated results of the run.\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/win10__Norns-Qwen2.5-7B-details\"\
,\n\tname=\"win10__Norns-Qwen2.5-7B__leaderboard_bbh_boolean_expressions\",\n\t\
split=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results from\
\ run 2024-11-19T23-36-10.410545](https://huggingface.co/datasets/open-llm-leaderboard/win10__Norns-Qwen2.5-7B-details/blob/main/win10__Norns-Qwen2.5-7B/results_2024-11-19T23-36-10.410545.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"prompt_level_loose_acc,none\": 0.6136783733826248,\n \"\
prompt_level_loose_acc_stderr,none\": 0.020953088140869307,\n \"inst_level_strict_acc,none\"\
: 0.6606714628297362,\n \"inst_level_strict_acc_stderr,none\": \"N/A\"\
,\n \"acc_norm,none\": 0.4597224023868206,\n \"acc_norm_stderr,none\"\
: 0.005322907542890862,\n \"inst_level_loose_acc,none\": 0.6990407673860911,\n\
\ \"inst_level_loose_acc_stderr,none\": \"N/A\",\n \"exact_match,none\"\
: 0.1555891238670695,\n \"exact_match_stderr,none\": 0.009524069929943578,\n\
\ \"acc,none\": 0.34133976063829785,\n \"acc_stderr,none\"\
: 0.004322881196692637,\n \"prompt_level_strict_acc,none\": 0.5637707948243993,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.021340853089940322,\n \
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\":\
\ {\n \"acc_norm,none\": 0.5028640860961638,\n \"acc_norm_stderr,none\"\
: 0.006174063612621372,\n \"alias\": \" - leaderboard_bbh\"\n \
\ },\n \"leaderboard_bbh_boolean_expressions\": {\n \"alias\"\
: \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\": 0.864,\n\
\ \"acc_norm_stderr,none\": 0.021723342617052086\n },\n \
\ \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.5828877005347594,\n \"acc_norm_stderr,none\"\
: 0.0361545093114083\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_date_understanding\",\n \
\ \"acc_norm,none\": 0.58,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\": 0.592,\n\
\ \"acc_norm_stderr,none\": 0.03114520984654851\n },\n \
\ \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.628,\n \"acc_norm_stderr,none\":\
\ 0.03063032594455827\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\",\n \
\ \"acc_norm,none\": 0.4,\n \"acc_norm_stderr,none\": 0.031046021028253316\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \"\
\ - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\": 0.752,\n \
\ \"acc_norm_stderr,none\": 0.027367497504863593\n },\n \"\
leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\": \" \
\ - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.424,\n \"acc_norm_stderr,none\": 0.03131803437491622\n },\n\
\ \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\",\n \"\
acc_norm,none\": 0.436,\n \"acc_norm_stderr,none\": 0.031425567060281365\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n\
\ \"acc_norm,none\": 0.556,\n \"acc_norm_stderr,none\": 0.03148684942554571\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"acc_norm,none\"\
: 0.708,\n \"acc_norm_stderr,none\": 0.028814320402205634\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.54,\n \"acc_norm_stderr,none\": 0.031584653891499004\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\"\
: \" - leaderboard_bbh_object_counting\",\n \"acc_norm,none\": 0.464,\n\
\ \"acc_norm_stderr,none\": 0.03160397514522374\n },\n \
\ \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\
,\n \"acc_norm,none\": 0.6027397260273972,\n \"acc_norm_stderr,none\"\
: 0.040636704038880346\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.6,\n \"acc_norm_stderr,none\": 0.031046021028253316\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\": 0.396,\n \
\ \"acc_norm_stderr,none\": 0.030993197854577898\n },\n \"\
leaderboard_bbh_salient_translation_error_detection\": {\n \"alias\"\
: \" - leaderboard_bbh_salient_translation_error_detection\",\n \"acc_norm,none\"\
: 0.544,\n \"acc_norm_stderr,none\": 0.031563285061213475\n },\n\
\ \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.6573033707865169,\n \"acc_norm_stderr,none\"\
: 0.03567395111782629\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \
\ \"acc_norm,none\": 0.552,\n \"acc_norm_stderr,none\": 0.03151438761115348\n\
\ },\n \"leaderboard_bbh_temporal_sequences\": {\n \"alias\"\
: \" - leaderboard_bbh_temporal_sequences\",\n \"acc_norm,none\": 0.204,\n\
\ \"acc_norm_stderr,none\": 0.025537121574548162\n },\n \
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \"\
alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\",\n \
\ \"acc_norm,none\": 0.184,\n \"acc_norm_stderr,none\": 0.02455581299422255\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.16,\n \"acc_norm_stderr,none\": 0.023232714782060626\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.24,\n \"acc_norm_stderr,none\": 0.027065293652238982\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\":\
\ \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\": 0.508,\n\
\ \"acc_norm_stderr,none\": 0.03168215643141386\n },\n \
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.28439597315436244,\n\
\ \"acc_norm_stderr,none\": 0.0130760865262698,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.31313131313131315,\n \"acc_norm_stderr,none\": 0.033042050878136546\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.2857142857142857,\n\
\ \"acc_norm_stderr,none\": 0.019351013185102753\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.2700892857142857,\n \"acc_norm_stderr,none\"\
: 0.021000749078822437\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.5637707948243993,\n \"prompt_level_strict_acc_stderr,none\": 0.021340853089940322,\n\
\ \"inst_level_strict_acc,none\": 0.6606714628297362,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.6136783733826248,\n \"prompt_level_loose_acc_stderr,none\": 0.020953088140869307,\n\
\ \"inst_level_loose_acc,none\": 0.6990407673860911,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.1555891238670695,\n \"exact_match_stderr,none\"\
: 0.009524069929943578,\n \"alias\": \" - leaderboard_math_hard\"\n \
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.2931596091205212,\n\
\ \"exact_match_stderr,none\": 0.026022680699478443\n },\n \
\ \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\": \"\
\ - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.08943089430894309,\n \"exact_match_stderr,none\": 0.02583570560354723\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\"\
: \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\": 0.09090909090909091,\n\
\ \"exact_match_stderr,none\": 0.0251172256361608\n },\n \
\ \"leaderboard_math_intermediate_algebra_hard\": {\n \"alias\": \"\
\ - leaderboard_math_intermediate_algebra_hard\",\n \"exact_match,none\"\
: 0.04642857142857143,\n \"exact_match_stderr,none\": 0.012597001290725617\n\
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\"\
: \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\": 0.09740259740259741,\n\
\ \"exact_match_stderr,none\": 0.023971024368870292\n },\n \
\ \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.29533678756476683,\n \"exact_match_stderr,none\"\
: 0.032922966391551386\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.05925925925925926,\n \"exact_match_stderr,none\"\
: 0.02039673654232189\n },\n \"leaderboard_mmlu_pro\": {\n \
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.34133976063829785,\n\
\ \"acc_stderr,none\": 0.004322881196692637\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.4074074074074074,\n \"acc_norm_stderr,none\"\
: 0.017536151989967247,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"alias\":\
\ \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\": 0.508,\n\
\ \"acc_norm_stderr,none\": 0.03168215643141386\n },\n \
\ \"leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.2734375,\n \"acc_norm_stderr,none\"\
: 0.027912287939448926\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \
\ \"acc_norm,none\": 0.444,\n \"acc_norm_stderr,none\": 0.03148684942554571\n\
\ }\n },\n \"leaderboard\": {\n \"prompt_level_loose_acc,none\"\
: 0.6136783733826248,\n \"prompt_level_loose_acc_stderr,none\": 0.020953088140869307,\n\
\ \"inst_level_strict_acc,none\": 0.6606714628297362,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"acc_norm,none\": 0.4597224023868206,\n \"acc_norm_stderr,none\"\
: 0.005322907542890862,\n \"inst_level_loose_acc,none\": 0.6990407673860911,\n\
\ \"inst_level_loose_acc_stderr,none\": \"N/A\",\n \"exact_match,none\"\
: 0.1555891238670695,\n \"exact_match_stderr,none\": 0.009524069929943578,\n\
\ \"acc,none\": 0.34133976063829785,\n \"acc_stderr,none\": 0.004322881196692637,\n\
\ \"prompt_level_strict_acc,none\": 0.5637707948243993,\n \"prompt_level_strict_acc_stderr,none\"\
: 0.021340853089940322,\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.5028640860961638,\n \"acc_norm_stderr,none\"\
: 0.006174063612621372,\n \"alias\": \" - leaderboard_bbh\"\n },\n \
\ \"leaderboard_bbh_boolean_expressions\": {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\"\
,\n \"acc_norm,none\": 0.864,\n \"acc_norm_stderr,none\": 0.021723342617052086\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.5828877005347594,\n \"acc_norm_stderr,none\"\
: 0.0361545093114083\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.58,\n \"acc_norm_stderr,none\": 0.03127799950463661\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\"\
: 0.592,\n \"acc_norm_stderr,none\": 0.03114520984654851\n },\n \"\
leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.628,\n \"acc_norm_stderr,none\": 0.03063032594455827\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.4,\n \"acc_norm_stderr,none\": 0.031046021028253316\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.752,\n \"acc_norm_stderr,none\": 0.027367497504863593\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.424,\n \"acc_norm_stderr,none\": 0.03131803437491622\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.436,\n \"acc_norm_stderr,none\": 0.031425567060281365\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.556,\n \"acc_norm_stderr,none\": 0.03148684942554571\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.708,\n \"acc_norm_stderr,none\": 0.028814320402205634\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.54,\n \"acc_norm_stderr,none\": 0.031584653891499004\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.464,\n \"acc_norm_stderr,none\": 0.03160397514522374\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.6027397260273972,\n\
\ \"acc_norm_stderr,none\": 0.040636704038880346\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.6,\n \"acc_norm_stderr,none\": 0.031046021028253316\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.396,\n \"acc_norm_stderr,none\": 0.030993197854577898\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.544,\n \"acc_norm_stderr,none\": 0.031563285061213475\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.6573033707865169,\n \"acc_norm_stderr,none\"\
: 0.03567395111782629\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.552,\n \"acc_norm_stderr,none\": 0.03151438761115348\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.204,\n \"acc_norm_stderr,none\": 0.025537121574548162\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.184,\n \"acc_norm_stderr,none\": 0.02455581299422255\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.16,\n \"acc_norm_stderr,none\": 0.023232714782060626\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.24,\n \"acc_norm_stderr,none\": 0.027065293652238982\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.508,\n \"acc_norm_stderr,none\": 0.03168215643141386\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.28439597315436244,\n\
\ \"acc_norm_stderr,none\": 0.0130760865262698,\n \"alias\": \" -\
\ leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"alias\"\
: \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.31313131313131315,\n\
\ \"acc_norm_stderr,none\": 0.033042050878136546\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.2857142857142857,\n \"acc_norm_stderr,none\": 0.019351013185102753\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.2700892857142857,\n \"acc_norm_stderr,none\"\
: 0.021000749078822437\n },\n \"leaderboard_ifeval\": {\n \"alias\"\
: \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.5637707948243993,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.021340853089940322,\n \
\ \"inst_level_strict_acc,none\": 0.6606714628297362,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.6136783733826248,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.020953088140869307,\n \"inst_level_loose_acc,none\"\
: 0.6990407673860911,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n \
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.1555891238670695,\n\
\ \"exact_match_stderr,none\": 0.009524069929943578,\n \"alias\":\
\ \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\": {\n\
\ \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.2931596091205212,\n \"exact_match_stderr,none\": 0.026022680699478443\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.08943089430894309,\n \"exact_match_stderr,none\": 0.02583570560354723\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\": \" - leaderboard_math_geometry_hard\"\
,\n \"exact_match,none\": 0.09090909090909091,\n \"exact_match_stderr,none\"\
: 0.0251172256361608\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.04642857142857143,\n \"exact_match_stderr,none\"\
: 0.012597001290725617\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.09740259740259741,\n \"exact_match_stderr,none\": 0.023971024368870292\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.29533678756476683,\n \"exact_match_stderr,none\"\
: 0.032922966391551386\n },\n \"leaderboard_math_precalculus_hard\": {\n \
\ \"alias\": \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\"\
: 0.05925925925925926,\n \"exact_match_stderr,none\": 0.02039673654232189\n\
\ },\n \"leaderboard_mmlu_pro\": {\n \"alias\": \" - leaderboard_mmlu_pro\"\
,\n \"acc,none\": 0.34133976063829785,\n \"acc_stderr,none\": 0.004322881196692637\n\
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.4074074074074074,\n\
\ \"acc_norm_stderr,none\": 0.017536151989967247,\n \"alias\": \"\
\ - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\": {\n \
\ \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\"\
: 0.508,\n \"acc_norm_stderr,none\": 0.03168215643141386\n },\n \"\
leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.2734375,\n \"acc_norm_stderr,none\": 0.027912287939448926\n\
\ },\n \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.444,\n \"acc_norm_stderr,none\": 0.03148684942554571\n\
\ }\n}\n```"
repo_url: https://huggingface.co/win10/Norns-Qwen2.5-7B
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_bbh_navigate
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_bbh_snarks
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_gpqa_extended
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_gpqa_main
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_gpqa_main_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_ifeval
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_ifeval_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_mmlu_pro
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_musr_object_placements
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-19T23-36-10.410545.jsonl'
- config_name: win10__Norns-Qwen2.5-7B__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_19T23_36_10.410545
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-19T23-36-10.410545.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-19T23-36-10.410545.jsonl'
---
# Dataset Card for Evaluation run of win10/Norns-Qwen2.5-7B
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [win10/Norns-Qwen2.5-7B](https://huggingface.co/win10/Norns-Qwen2.5-7B).
The dataset is composed of 38 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/win10__Norns-Qwen2.5-7B-details",
name="win10__Norns-Qwen2.5-7B__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
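Once loaded, the split behaves like a regular `datasets.Dataset`, so you can inspect individual evaluation samples directly. The snippet below is a minimal sketch of that; the exact per-sample fields depend on the harness output for this run, so check `data.column_names` rather than assuming a particular schema.
```python
# Minimal sketch (assumes the `data` split loaded above):
print(len(data))          # number of evaluation samples in the split
print(data.column_names)  # actual fields stored for each sample
print(data[0])            # first sample as a plain Python dict
```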
## Latest results
These are the [latest results from run 2024-11-19T23-36-10.410545](https://huggingface.co/datasets/open-llm-leaderboard/win10__Norns-Qwen2.5-7B-details/blob/main/win10__Norns-Qwen2.5-7B/results_2024-11-19T23-36-10.410545.json) (note that there might be results for other tasks in the repository if successive evals didn't cover the same tasks; you can find each one in the results file and in the "latest" split for each eval):
```python
{
"all": {
"leaderboard": {
"prompt_level_loose_acc,none": 0.6136783733826248,
"prompt_level_loose_acc_stderr,none": 0.020953088140869307,
"inst_level_strict_acc,none": 0.6606714628297362,
"inst_level_strict_acc_stderr,none": "N/A",
"acc_norm,none": 0.4597224023868206,
"acc_norm_stderr,none": 0.005322907542890862,
"inst_level_loose_acc,none": 0.6990407673860911,
"inst_level_loose_acc_stderr,none": "N/A",
"exact_match,none": 0.1555891238670695,
"exact_match_stderr,none": 0.009524069929943578,
"acc,none": 0.34133976063829785,
"acc_stderr,none": 0.004322881196692637,
"prompt_level_strict_acc,none": 0.5637707948243993,
"prompt_level_strict_acc_stderr,none": 0.021340853089940322,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.5028640860961638,
"acc_norm_stderr,none": 0.006174063612621372,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.864,
"acc_norm_stderr,none": 0.021723342617052086
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5828877005347594,
"acc_norm_stderr,none": 0.0361545093114083
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.58,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.592,
"acc_norm_stderr,none": 0.03114520984654851
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.628,
"acc_norm_stderr,none": 0.03063032594455827
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.4,
"acc_norm_stderr,none": 0.031046021028253316
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.752,
"acc_norm_stderr,none": 0.027367497504863593
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.424,
"acc_norm_stderr,none": 0.03131803437491622
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.436,
"acc_norm_stderr,none": 0.031425567060281365
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.556,
"acc_norm_stderr,none": 0.03148684942554571
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.708,
"acc_norm_stderr,none": 0.028814320402205634
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.54,
"acc_norm_stderr,none": 0.031584653891499004
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.464,
"acc_norm_stderr,none": 0.03160397514522374
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.6027397260273972,
"acc_norm_stderr,none": 0.040636704038880346
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.6,
"acc_norm_stderr,none": 0.031046021028253316
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.396,
"acc_norm_stderr,none": 0.030993197854577898
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.6573033707865169,
"acc_norm_stderr,none": 0.03567395111782629
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.03151438761115348
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.204,
"acc_norm_stderr,none": 0.025537121574548162
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.184,
"acc_norm_stderr,none": 0.02455581299422255
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.16,
"acc_norm_stderr,none": 0.023232714782060626
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.24,
"acc_norm_stderr,none": 0.027065293652238982
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_gpqa": {
"acc_norm,none": 0.28439597315436244,
"acc_norm_stderr,none": 0.0130760865262698,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.31313131313131315,
"acc_norm_stderr,none": 0.033042050878136546
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.2857142857142857,
"acc_norm_stderr,none": 0.019351013185102753
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.2700892857142857,
"acc_norm_stderr,none": 0.021000749078822437
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.5637707948243993,
"prompt_level_strict_acc_stderr,none": 0.021340853089940322,
"inst_level_strict_acc,none": 0.6606714628297362,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.6136783733826248,
"prompt_level_loose_acc_stderr,none": 0.020953088140869307,
"inst_level_loose_acc,none": 0.6990407673860911,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.1555891238670695,
"exact_match_stderr,none": 0.009524069929943578,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.2931596091205212,
"exact_match_stderr,none": 0.026022680699478443
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.08943089430894309,
"exact_match_stderr,none": 0.02583570560354723
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.09090909090909091,
"exact_match_stderr,none": 0.0251172256361608
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.04642857142857143,
"exact_match_stderr,none": 0.012597001290725617
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.09740259740259741,
"exact_match_stderr,none": 0.023971024368870292
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.29533678756476683,
"exact_match_stderr,none": 0.032922966391551386
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.05925925925925926,
"exact_match_stderr,none": 0.02039673654232189
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.34133976063829785,
"acc_stderr,none": 0.004322881196692637
},
"leaderboard_musr": {
"acc_norm,none": 0.4074074074074074,
"acc_norm_stderr,none": 0.017536151989967247,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.2734375,
"acc_norm_stderr,none": 0.027912287939448926
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.444,
"acc_norm_stderr,none": 0.03148684942554571
}
},
"leaderboard": {
"prompt_level_loose_acc,none": 0.6136783733826248,
"prompt_level_loose_acc_stderr,none": 0.020953088140869307,
"inst_level_strict_acc,none": 0.6606714628297362,
"inst_level_strict_acc_stderr,none": "N/A",
"acc_norm,none": 0.4597224023868206,
"acc_norm_stderr,none": 0.005322907542890862,
"inst_level_loose_acc,none": 0.6990407673860911,
"inst_level_loose_acc_stderr,none": "N/A",
"exact_match,none": 0.1555891238670695,
"exact_match_stderr,none": 0.009524069929943578,
"acc,none": 0.34133976063829785,
"acc_stderr,none": 0.004322881196692637,
"prompt_level_strict_acc,none": 0.5637707948243993,
"prompt_level_strict_acc_stderr,none": 0.021340853089940322,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.5028640860961638,
"acc_norm_stderr,none": 0.006174063612621372,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.864,
"acc_norm_stderr,none": 0.021723342617052086
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5828877005347594,
"acc_norm_stderr,none": 0.0361545093114083
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.58,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.592,
"acc_norm_stderr,none": 0.03114520984654851
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.628,
"acc_norm_stderr,none": 0.03063032594455827
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.4,
"acc_norm_stderr,none": 0.031046021028253316
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.752,
"acc_norm_stderr,none": 0.027367497504863593
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.424,
"acc_norm_stderr,none": 0.03131803437491622
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.436,
"acc_norm_stderr,none": 0.031425567060281365
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.556,
"acc_norm_stderr,none": 0.03148684942554571
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.708,
"acc_norm_stderr,none": 0.028814320402205634
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.54,
"acc_norm_stderr,none": 0.031584653891499004
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.464,
"acc_norm_stderr,none": 0.03160397514522374
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.6027397260273972,
"acc_norm_stderr,none": 0.040636704038880346
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.6,
"acc_norm_stderr,none": 0.031046021028253316
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.396,
"acc_norm_stderr,none": 0.030993197854577898
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.6573033707865169,
"acc_norm_stderr,none": 0.03567395111782629
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.03151438761115348
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.204,
"acc_norm_stderr,none": 0.025537121574548162
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.184,
"acc_norm_stderr,none": 0.02455581299422255
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.16,
"acc_norm_stderr,none": 0.023232714782060626
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.24,
"acc_norm_stderr,none": 0.027065293652238982
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_gpqa": {
"acc_norm,none": 0.28439597315436244,
"acc_norm_stderr,none": 0.0130760865262698,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.31313131313131315,
"acc_norm_stderr,none": 0.033042050878136546
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.2857142857142857,
"acc_norm_stderr,none": 0.019351013185102753
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.2700892857142857,
"acc_norm_stderr,none": 0.021000749078822437
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.5637707948243993,
"prompt_level_strict_acc_stderr,none": 0.021340853089940322,
"inst_level_strict_acc,none": 0.6606714628297362,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.6136783733826248,
"prompt_level_loose_acc_stderr,none": 0.020953088140869307,
"inst_level_loose_acc,none": 0.6990407673860911,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.1555891238670695,
"exact_match_stderr,none": 0.009524069929943578,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.2931596091205212,
"exact_match_stderr,none": 0.026022680699478443
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.08943089430894309,
"exact_match_stderr,none": 0.02583570560354723
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.09090909090909091,
"exact_match_stderr,none": 0.0251172256361608
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.04642857142857143,
"exact_match_stderr,none": 0.012597001290725617
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.09740259740259741,
"exact_match_stderr,none": 0.023971024368870292
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.29533678756476683,
"exact_match_stderr,none": 0.032922966391551386
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.05925925925925926,
"exact_match_stderr,none": 0.02039673654232189
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.34133976063829785,
"acc_stderr,none": 0.004322881196692637
},
"leaderboard_musr": {
"acc_norm,none": 0.4074074074074074,
"acc_norm_stderr,none": 0.017536151989967247,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.2734375,
"acc_norm_stderr,none": 0.027912287939448926
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.444,
"acc_norm_stderr,none": 0.03148684942554571
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/win10__EVA-Norns-Qwen2.5-v0.1-details | open-llm-leaderboard | "2024-11-19T23:39:51Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T23:36:26Z" | ---
pretty_name: Evaluation run of win10/EVA-Norns-Qwen2.5-v0.1
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [win10/EVA-Norns-Qwen2.5-v0.1](https://huggingface.co/win10/EVA-Norns-Qwen2.5-v0.1)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/win10__EVA-Norns-Qwen2.5-v0.1-details\"\
,\n\tname=\"win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-11-19T23-36-26.139733](https://huggingface.co/datasets/open-llm-leaderboard/win10__EVA-Norns-Qwen2.5-v0.1-details/blob/main/win10__EVA-Norns-Qwen2.5-v0.1/results_2024-11-19T23-36-26.139733.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"prompt_level_strict_acc,none\": 0.5748613678373382,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.02127403980535566,\n \"\
acc,none\": 0.3425033244680851,\n \"acc_stderr,none\": 0.004326416366929387,\n\
\ \"acc_norm,none\": 0.45933324685432614,\n \"acc_norm_stderr,none\"\
: 0.005328572656791026,\n \"prompt_level_loose_acc,none\": 0.600739371534196,\n\
\ \"prompt_level_loose_acc_stderr,none\": 0.021075331332701255,\n \
\ \"inst_level_strict_acc,none\": 0.6690647482014388,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"inst_level_loose_acc,none\": 0.6870503597122302,\n \
\ \"inst_level_loose_acc_stderr,none\": \"N/A\",\n \"exact_match,none\"\
: 0.15483383685800603,\n \"exact_match_stderr,none\": 0.00950587639647196,\n\
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.5026905051206387,\n \"acc_norm_stderr,none\"\
: 0.006183963812405072,\n \"alias\": \" - leaderboard_bbh\"\n \
\ },\n \"leaderboard_bbh_boolean_expressions\": {\n \"alias\"\
: \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\": 0.86,\n\
\ \"acc_norm_stderr,none\": 0.021989409645240245\n },\n \
\ \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.5828877005347594,\n \"acc_norm_stderr,none\"\
: 0.0361545093114083\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_date_understanding\",\n \
\ \"acc_norm,none\": 0.584,\n \"acc_norm_stderr,none\": 0.031235856237014505\n\
\ },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\": 0.592,\n\
\ \"acc_norm_stderr,none\": 0.03114520984654851\n },\n \
\ \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.62,\n \"acc_norm_stderr,none\": 0.030760116042626098\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\"\
: \" - leaderboard_bbh_geometric_shapes\",\n \"acc_norm,none\": 0.392,\n\
\ \"acc_norm_stderr,none\": 0.030938207620401222\n },\n \
\ \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.748,\n \"acc_norm_stderr,none\":\
\ 0.027513851933031318\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
,\n \"acc_norm,none\": 0.428,\n \"acc_norm_stderr,none\":\
\ 0.031355968923772626\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.432,\n \"acc_norm_stderr,none\":\
\ 0.03139181076542942\n },\n \"leaderboard_bbh_logical_deduction_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\
,\n \"acc_norm,none\": 0.556,\n \"acc_norm_stderr,none\":\
\ 0.03148684942554571\n },\n \"leaderboard_bbh_movie_recommendation\"\
: {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\",\n \
\ \"acc_norm,none\": 0.704,\n \"acc_norm_stderr,none\": 0.028928939388379697\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \"\
\ - leaderboard_bbh_navigate\",\n \"acc_norm,none\": 0.544,\n \
\ \"acc_norm_stderr,none\": 0.031563285061213475\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"alias\": \" - leaderboard_bbh_object_counting\",\n \
\ \"acc_norm,none\": 0.464,\n \"acc_norm_stderr,none\": 0.03160397514522374\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
alias\": \" - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\"\
: 0.6095890410958904,\n \"acc_norm_stderr,none\": 0.040513109165891854\n\
\ },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\",\n\
\ \"acc_norm,none\": 0.596,\n \"acc_norm_stderr,none\": 0.03109668818482536\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\": 0.396,\n \
\ \"acc_norm_stderr,none\": 0.030993197854577898\n },\n \"\
leaderboard_bbh_salient_translation_error_detection\": {\n \"alias\"\
: \" - leaderboard_bbh_salient_translation_error_detection\",\n \"acc_norm,none\"\
: 0.544,\n \"acc_norm_stderr,none\": 0.031563285061213475\n },\n\
\ \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.6573033707865169,\n \"acc_norm_stderr,none\"\
: 0.03567395111782629\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \
\ \"acc_norm,none\": 0.552,\n \"acc_norm_stderr,none\": 0.03151438761115348\n\
\ },\n \"leaderboard_bbh_temporal_sequences\": {\n \"alias\"\
: \" - leaderboard_bbh_temporal_sequences\",\n \"acc_norm,none\": 0.212,\n\
\ \"acc_norm_stderr,none\": 0.025901884690541117\n },\n \
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \"\
alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\",\n \
\ \"acc_norm,none\": 0.192,\n \"acc_norm_stderr,none\": 0.024960691989171963\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.152,\n \"acc_norm_stderr,none\":\
\ 0.022752024491765464\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.252,\n \"acc_norm_stderr,none\":\
\ 0.027513851933031318\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.504,\n \"acc_norm_stderr,none\": 0.0316851985511992\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.28523489932885904,\n\
\ \"acc_norm_stderr,none\": 0.013082167406856764,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.32323232323232326,\n \"acc_norm_stderr,none\": 0.03332299921070644\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.2875457875457875,\n\
\ \"acc_norm_stderr,none\": 0.019388032230751553\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.265625,\n \"acc_norm_stderr,none\"\
: 0.02089005840079951\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.5748613678373382,\n \"prompt_level_strict_acc_stderr,none\": 0.021274039805355655,\n\
\ \"inst_level_strict_acc,none\": 0.6690647482014388,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.600739371534196,\n \"prompt_level_loose_acc_stderr,none\": 0.021075331332701255,\n\
\ \"inst_level_loose_acc,none\": 0.6870503597122302,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.15483383685800603,\n \"exact_match_stderr,none\"\
: 0.00950587639647196,\n \"alias\": \" - leaderboard_math_hard\"\n \
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\":\
\ \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.2899022801302932,\n\
\ \"exact_match_stderr,none\": 0.02593726393580584\n },\n \
\ \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\": \" \
\ - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.08130081300813008,\n \"exact_match_stderr,none\": 0.024743104820672362\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\"\
: \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\": 0.08333333333333333,\n\
\ \"exact_match_stderr,none\": 0.02414790444796731\n },\n \
\ \"leaderboard_math_intermediate_algebra_hard\": {\n \"alias\": \"\
\ - leaderboard_math_intermediate_algebra_hard\",\n \"exact_match,none\"\
: 0.04642857142857143,\n \"exact_match_stderr,none\": 0.012597001290725617\n\
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\"\
: \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\": 0.11688311688311688,\n\
\ \"exact_match_stderr,none\": 0.025974025974025972\n },\n \
\ \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.29533678756476683,\n \"exact_match_stderr,none\"\
: 0.032922966391551386\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.05185185185185185,\n \"exact_match_stderr,none\"\
: 0.019154368449050496\n },\n \"leaderboard_mmlu_pro\": {\n \
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.3425033244680851,\n\
\ \"acc_stderr,none\": 0.004326416366929387\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.40343915343915343,\n \"acc_norm_stderr,none\"\
: 0.01750120545396071,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"alias\": \"\
\ - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\": 0.508,\n\
\ \"acc_norm_stderr,none\": 0.03168215643141386\n },\n \
\ \"leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.26953125,\n \"acc_norm_stderr,none\"\
: 0.02778659256840428\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \
\ \"acc_norm,none\": 0.436,\n \"acc_norm_stderr,none\": 0.031425567060281365\n\
\ }\n },\n \"leaderboard\": {\n \"prompt_level_strict_acc,none\"\
: 0.5748613678373382,\n \"prompt_level_strict_acc_stderr,none\": 0.02127403980535566,\n\
\ \"acc,none\": 0.3425033244680851,\n \"acc_stderr,none\": 0.004326416366929387,\n\
\ \"acc_norm,none\": 0.45933324685432614,\n \"acc_norm_stderr,none\"\
: 0.005328572656791026,\n \"prompt_level_loose_acc,none\": 0.600739371534196,\n\
\ \"prompt_level_loose_acc_stderr,none\": 0.021075331332701255,\n \
\ \"inst_level_strict_acc,none\": 0.6690647482014388,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"inst_level_loose_acc,none\": 0.6870503597122302,\n \
\ \"inst_level_loose_acc_stderr,none\": \"N/A\",\n \"exact_match,none\":\
\ 0.15483383685800603,\n \"exact_match_stderr,none\": 0.00950587639647196,\n\
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \
\ \"acc_norm,none\": 0.5026905051206387,\n \"acc_norm_stderr,none\": 0.006183963812405072,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \"\
acc_norm,none\": 0.86,\n \"acc_norm_stderr,none\": 0.021989409645240245\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.5828877005347594,\n \"acc_norm_stderr,none\"\
: 0.0361545093114083\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.584,\n \"acc_norm_stderr,none\": 0.031235856237014505\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.592,\n \"acc_norm_stderr,none\": 0.03114520984654851\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.62,\n \"acc_norm_stderr,none\": 0.030760116042626098\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.392,\n \"acc_norm_stderr,none\": 0.030938207620401222\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.748,\n \"acc_norm_stderr,none\": 0.027513851933031318\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.428,\n \"acc_norm_stderr,none\": 0.031355968923772626\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.432,\n \"acc_norm_stderr,none\": 0.03139181076542942\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.556,\n \"acc_norm_stderr,none\": 0.03148684942554571\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.704,\n \"acc_norm_stderr,none\": 0.028928939388379697\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.544,\n \"acc_norm_stderr,none\": 0.031563285061213475\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.464,\n \"acc_norm_stderr,none\": 0.03160397514522374\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.6095890410958904,\n\
\ \"acc_norm_stderr,none\": 0.040513109165891854\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.596,\n \"acc_norm_stderr,none\": 0.03109668818482536\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.396,\n \"acc_norm_stderr,none\": 0.030993197854577898\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.544,\n \"acc_norm_stderr,none\": 0.031563285061213475\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.6573033707865169,\n \"acc_norm_stderr,none\"\
: 0.03567395111782629\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.552,\n \"acc_norm_stderr,none\": 0.03151438761115348\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.212,\n \"acc_norm_stderr,none\": 0.025901884690541117\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.192,\n \"acc_norm_stderr,none\": 0.024960691989171963\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.152,\n \"acc_norm_stderr,none\": 0.022752024491765464\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.252,\n \"acc_norm_stderr,none\": 0.027513851933031318\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.504,\n \"acc_norm_stderr,none\": 0.0316851985511992\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.28523489932885904,\n\
\ \"acc_norm_stderr,none\": 0.013082167406856764,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.32323232323232326,\n\
\ \"acc_norm_stderr,none\": 0.03332299921070644\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.2875457875457875,\n \"acc_norm_stderr,none\": 0.019388032230751553\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.265625,\n \"acc_norm_stderr,none\": 0.02089005840079951\n\
\ },\n \"leaderboard_ifeval\": {\n \"alias\": \" - leaderboard_ifeval\"\
,\n \"prompt_level_strict_acc,none\": 0.5748613678373382,\n \"prompt_level_strict_acc_stderr,none\"\
: 0.021274039805355655,\n \"inst_level_strict_acc,none\": 0.6690647482014388,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.600739371534196,\n \"prompt_level_loose_acc_stderr,none\": 0.021075331332701255,\n\
\ \"inst_level_loose_acc,none\": 0.6870503597122302,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\"\n },\n \"leaderboard_math_hard\": {\n \"exact_match,none\"\
: 0.15483383685800603,\n \"exact_match_stderr,none\": 0.00950587639647196,\n\
\ \"alias\": \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.2899022801302932,\n \"exact_match_stderr,none\": 0.02593726393580584\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.08130081300813008,\n \"exact_match_stderr,none\": 0.024743104820672362\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\": \" - leaderboard_math_geometry_hard\"\
,\n \"exact_match,none\": 0.08333333333333333,\n \"exact_match_stderr,none\"\
: 0.02414790444796731\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.04642857142857143,\n \"exact_match_stderr,none\"\
: 0.012597001290725617\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.11688311688311688,\n \"exact_match_stderr,none\": 0.025974025974025972\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.29533678756476683,\n \"exact_match_stderr,none\"\
: 0.032922966391551386\n },\n \"leaderboard_math_precalculus_hard\": {\n \
\ \"alias\": \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\"\
: 0.05185185185185185,\n \"exact_match_stderr,none\": 0.019154368449050496\n\
\ },\n \"leaderboard_mmlu_pro\": {\n \"alias\": \" - leaderboard_mmlu_pro\"\
,\n \"acc,none\": 0.3425033244680851,\n \"acc_stderr,none\": 0.004326416366929387\n\
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.40343915343915343,\n\
\ \"acc_norm_stderr,none\": 0.01750120545396071,\n \"alias\": \" -\
\ leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\": {\n \
\ \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\"\
: 0.508,\n \"acc_norm_stderr,none\": 0.03168215643141386\n },\n \"\
leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.26953125,\n \"acc_norm_stderr,none\": 0.02778659256840428\n\
\ },\n \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.436,\n \"acc_norm_stderr,none\": 0.031425567060281365\n\
\ }\n}\n```"
repo_url: https://huggingface.co/win10/EVA-Norns-Qwen2.5-v0.1
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_bbh_navigate
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_bbh_snarks
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_gpqa_extended
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_gpqa_main
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_gpqa_main_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_ifeval
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_ifeval_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_mmlu_pro
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_musr_object_placements
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-19T23-36-26.139733.jsonl'
- config_name: win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_19T23_36_26.139733
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-19T23-36-26.139733.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-19T23-36-26.139733.jsonl'
---
# Dataset Card for Evaluation run of win10/EVA-Norns-Qwen2.5-v0.1
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [win10/EVA-Norns-Qwen2.5-v0.1](https://huggingface.co/win10/EVA-Norns-Qwen2.5-v0.1)
The dataset is composed of 38 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the results of the most recent run.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/win10__EVA-Norns-Qwen2.5-v0.1-details",
name="win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
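The same pattern applies to any of the 38 task configurations. Below is a minimal sketch (assuming a recent `datasets` release that exposes `get_dataset_config_names`) that enumerates the available configurations and loads the IFEval details from the latest run:
```python
from datasets import get_dataset_config_names, load_dataset

REPO = "open-llm-leaderboard/win10__EVA-Norns-Qwen2.5-v0.1-details"

# One configuration per evaluated task (38 in total for this run).
configs = get_dataset_config_names(REPO)
print(len(configs), "configurations, e.g.:", configs[:3])

# The "latest" split mirrors the most recent evaluation; the timestamped
# split (e.g. "2024_11_19T23_36_26.139733") pins a specific run.
ifeval = load_dataset(
    REPO,
    name="win10__EVA-Norns-Qwen2.5-v0.1__leaderboard_ifeval",
    split="latest",
)
print(ifeval)
```
Per-sample fields vary from task to task, so it can be useful to inspect `ifeval.column_names` before processing the details.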
## Latest results
These are the [latest results from run 2024-11-19T23-36-26.139733](https://huggingface.co/datasets/open-llm-leaderboard/win10__EVA-Norns-Qwen2.5-v0.1-details/blob/main/win10__EVA-Norns-Qwen2.5-v0.1/results_2024-11-19T23-36-26.139733.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each of them in the results and in the "latest" split of each eval):
```python
{
"all": {
"leaderboard": {
"prompt_level_strict_acc,none": 0.5748613678373382,
"prompt_level_strict_acc_stderr,none": 0.02127403980535566,
"acc,none": 0.3425033244680851,
"acc_stderr,none": 0.004326416366929387,
"acc_norm,none": 0.45933324685432614,
"acc_norm_stderr,none": 0.005328572656791026,
"prompt_level_loose_acc,none": 0.600739371534196,
"prompt_level_loose_acc_stderr,none": 0.021075331332701255,
"inst_level_strict_acc,none": 0.6690647482014388,
"inst_level_strict_acc_stderr,none": "N/A",
"inst_level_loose_acc,none": 0.6870503597122302,
"inst_level_loose_acc_stderr,none": "N/A",
"exact_match,none": 0.15483383685800603,
"exact_match_stderr,none": 0.00950587639647196,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.5026905051206387,
"acc_norm_stderr,none": 0.006183963812405072,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.86,
"acc_norm_stderr,none": 0.021989409645240245
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5828877005347594,
"acc_norm_stderr,none": 0.0361545093114083
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.584,
"acc_norm_stderr,none": 0.031235856237014505
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.592,
"acc_norm_stderr,none": 0.03114520984654851
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.62,
"acc_norm_stderr,none": 0.030760116042626098
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.392,
"acc_norm_stderr,none": 0.030938207620401222
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.748,
"acc_norm_stderr,none": 0.027513851933031318
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.428,
"acc_norm_stderr,none": 0.031355968923772626
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.432,
"acc_norm_stderr,none": 0.03139181076542942
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.556,
"acc_norm_stderr,none": 0.03148684942554571
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.704,
"acc_norm_stderr,none": 0.028928939388379697
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.464,
"acc_norm_stderr,none": 0.03160397514522374
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.6095890410958904,
"acc_norm_stderr,none": 0.040513109165891854
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.596,
"acc_norm_stderr,none": 0.03109668818482536
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.396,
"acc_norm_stderr,none": 0.030993197854577898
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.6573033707865169,
"acc_norm_stderr,none": 0.03567395111782629
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.03151438761115348
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.212,
"acc_norm_stderr,none": 0.025901884690541117
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.192,
"acc_norm_stderr,none": 0.024960691989171963
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.152,
"acc_norm_stderr,none": 0.022752024491765464
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.252,
"acc_norm_stderr,none": 0.027513851933031318
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.504,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_gpqa": {
"acc_norm,none": 0.28523489932885904,
"acc_norm_stderr,none": 0.013082167406856764,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.32323232323232326,
"acc_norm_stderr,none": 0.03332299921070644
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.2875457875457875,
"acc_norm_stderr,none": 0.019388032230751553
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.265625,
"acc_norm_stderr,none": 0.02089005840079951
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.5748613678373382,
"prompt_level_strict_acc_stderr,none": 0.021274039805355655,
"inst_level_strict_acc,none": 0.6690647482014388,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.600739371534196,
"prompt_level_loose_acc_stderr,none": 0.021075331332701255,
"inst_level_loose_acc,none": 0.6870503597122302,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.15483383685800603,
"exact_match_stderr,none": 0.00950587639647196,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.2899022801302932,
"exact_match_stderr,none": 0.02593726393580584
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.08130081300813008,
"exact_match_stderr,none": 0.024743104820672362
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.08333333333333333,
"exact_match_stderr,none": 0.02414790444796731
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.04642857142857143,
"exact_match_stderr,none": 0.012597001290725617
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.11688311688311688,
"exact_match_stderr,none": 0.025974025974025972
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.29533678756476683,
"exact_match_stderr,none": 0.032922966391551386
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.05185185185185185,
"exact_match_stderr,none": 0.019154368449050496
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.3425033244680851,
"acc_stderr,none": 0.004326416366929387
},
"leaderboard_musr": {
"acc_norm,none": 0.40343915343915343,
"acc_norm_stderr,none": 0.01750120545396071,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.26953125,
"acc_norm_stderr,none": 0.02778659256840428
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.436,
"acc_norm_stderr,none": 0.031425567060281365
}
},
"leaderboard": {
"prompt_level_strict_acc,none": 0.5748613678373382,
"prompt_level_strict_acc_stderr,none": 0.02127403980535566,
"acc,none": 0.3425033244680851,
"acc_stderr,none": 0.004326416366929387,
"acc_norm,none": 0.45933324685432614,
"acc_norm_stderr,none": 0.005328572656791026,
"prompt_level_loose_acc,none": 0.600739371534196,
"prompt_level_loose_acc_stderr,none": 0.021075331332701255,
"inst_level_strict_acc,none": 0.6690647482014388,
"inst_level_strict_acc_stderr,none": "N/A",
"inst_level_loose_acc,none": 0.6870503597122302,
"inst_level_loose_acc_stderr,none": "N/A",
"exact_match,none": 0.15483383685800603,
"exact_match_stderr,none": 0.00950587639647196,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.5026905051206387,
"acc_norm_stderr,none": 0.006183963812405072,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.86,
"acc_norm_stderr,none": 0.021989409645240245
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5828877005347594,
"acc_norm_stderr,none": 0.0361545093114083
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.584,
"acc_norm_stderr,none": 0.031235856237014505
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.592,
"acc_norm_stderr,none": 0.03114520984654851
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.62,
"acc_norm_stderr,none": 0.030760116042626098
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.392,
"acc_norm_stderr,none": 0.030938207620401222
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.748,
"acc_norm_stderr,none": 0.027513851933031318
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.428,
"acc_norm_stderr,none": 0.031355968923772626
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.432,
"acc_norm_stderr,none": 0.03139181076542942
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.556,
"acc_norm_stderr,none": 0.03148684942554571
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.704,
"acc_norm_stderr,none": 0.028928939388379697
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.464,
"acc_norm_stderr,none": 0.03160397514522374
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.6095890410958904,
"acc_norm_stderr,none": 0.040513109165891854
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.596,
"acc_norm_stderr,none": 0.03109668818482536
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.396,
"acc_norm_stderr,none": 0.030993197854577898
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.6573033707865169,
"acc_norm_stderr,none": 0.03567395111782629
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.03151438761115348
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.212,
"acc_norm_stderr,none": 0.025901884690541117
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.192,
"acc_norm_stderr,none": 0.024960691989171963
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.152,
"acc_norm_stderr,none": 0.022752024491765464
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.252,
"acc_norm_stderr,none": 0.027513851933031318
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.504,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_gpqa": {
"acc_norm,none": 0.28523489932885904,
"acc_norm_stderr,none": 0.013082167406856764,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.32323232323232326,
"acc_norm_stderr,none": 0.03332299921070644
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.2875457875457875,
"acc_norm_stderr,none": 0.019388032230751553
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.265625,
"acc_norm_stderr,none": 0.02089005840079951
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.5748613678373382,
"prompt_level_strict_acc_stderr,none": 0.021274039805355655,
"inst_level_strict_acc,none": 0.6690647482014388,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.600739371534196,
"prompt_level_loose_acc_stderr,none": 0.021075331332701255,
"inst_level_loose_acc,none": 0.6870503597122302,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.15483383685800603,
"exact_match_stderr,none": 0.00950587639647196,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.2899022801302932,
"exact_match_stderr,none": 0.02593726393580584
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.08130081300813008,
"exact_match_stderr,none": 0.024743104820672362
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.08333333333333333,
"exact_match_stderr,none": 0.02414790444796731
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.04642857142857143,
"exact_match_stderr,none": 0.012597001290725617
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.11688311688311688,
"exact_match_stderr,none": 0.025974025974025972
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.29533678756476683,
"exact_match_stderr,none": 0.032922966391551386
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.05185185185185185,
"exact_match_stderr,none": 0.019154368449050496
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.3425033244680851,
"acc_stderr,none": 0.004326416366929387
},
"leaderboard_musr": {
"acc_norm,none": 0.40343915343915343,
"acc_norm_stderr,none": 0.01750120545396071,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.26953125,
"acc_norm_stderr,none": 0.02778659256840428
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.436,
"acc_norm_stderr,none": 0.031425567060281365
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/LeroyDyer__SpydazWeb_AI_HumanAI_RP-details | open-llm-leaderboard | "2024-11-19T23:39:51Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T23:36:46Z" | ---
pretty_name: Evaluation run of LeroyDyer/SpydazWeb_AI_HumanAI_RP
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [LeroyDyer/SpydazWeb_AI_HumanAI_RP](https://huggingface.co/LeroyDyer/SpydazWeb_AI_HumanAI_RP)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/LeroyDyer__SpydazWeb_AI_HumanAI_RP-details\"\
,\n\tname=\"LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-11-19T23-36-45.486047](https://huggingface.co/datasets/open-llm-leaderboard/LeroyDyer__SpydazWeb_AI_HumanAI_RP-details/blob/main/LeroyDyer__SpydazWeb_AI_HumanAI_RP/results_2024-11-19T23-36-45.486047.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"inst_level_strict_acc,none\": 0.31414868105515587,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"acc_norm,none\": 0.3280581138928525,\n\
\ \"acc_norm_stderr,none\": 0.005089377844034893,\n \"exact_match,none\"\
: 0.006042296072507553,\n \"exact_match_stderr,none\": 0.0021277822944088735,\n\
\ \"prompt_level_loose_acc,none\": 0.21072088724584104,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.017549801883664215,\n \"\
inst_level_loose_acc,none\": 0.3441247002398082,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_strict_acc,none\": 0.19408502772643252,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.01701938055074939,\n \
\ \"acc,none\": 0.1323969414893617,\n \"acc_stderr,none\": 0.0030899300648995166,\n\
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.3311925013018573,\n \"acc_norm_stderr,none\"\
: 0.005826333297557237,\n \"alias\": \" - leaderboard_bbh\"\n \
\ },\n \"leaderboard_bbh_boolean_expressions\": {\n \"alias\"\
: \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\": 0.728,\n\
\ \"acc_norm_stderr,none\": 0.028200088296309975\n },\n \
\ \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.5187165775401069,\n \"acc_norm_stderr,none\"\
: 0.03663608375537843\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_date_understanding\",\n \
\ \"acc_norm,none\": 0.26,\n \"acc_norm_stderr,none\": 0.027797315752644335\n\
\ },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\": 0.356,\n\
\ \"acc_norm_stderr,none\": 0.0303436806571532\n },\n \"\
leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.532,\n \"acc_norm_stderr,none\":\
\ 0.031621252575725574\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\",\n \
\ \"acc_norm,none\": 0.088,\n \"acc_norm_stderr,none\": 0.017953084777052892\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \"\
\ - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\": 0.484,\n \
\ \"acc_norm_stderr,none\": 0.03166998503010743\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
,\n \"acc_norm,none\": 0.192,\n \"acc_norm_stderr,none\":\
\ 0.024960691989171963\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.156,\n \"acc_norm_stderr,none\":\
\ 0.022995023034068682\n },\n \"leaderboard_bbh_logical_deduction_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\
,\n \"acc_norm,none\": 0.352,\n \"acc_norm_stderr,none\":\
\ 0.030266288057359866\n },\n \"leaderboard_bbh_movie_recommendation\"\
: {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\",\n \
\ \"acc_norm,none\": 0.38,\n \"acc_norm_stderr,none\": 0.030760116042626098\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \"\
\ - leaderboard_bbh_navigate\",\n \"acc_norm,none\": 0.42,\n \
\ \"acc_norm_stderr,none\": 0.03127799950463661\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"alias\": \" - leaderboard_bbh_object_counting\",\n \
\ \"acc_norm,none\": 0.328,\n \"acc_norm_stderr,none\": 0.029752391824475363\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
alias\": \" - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\"\
: 0.2191780821917808,\n \"acc_norm_stderr,none\": 0.03435504786264928\n\
\ },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\",\n\
\ \"acc_norm,none\": 0.156,\n \"acc_norm_stderr,none\": 0.022995023034068682\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\": 0.208,\n \
\ \"acc_norm_stderr,none\": 0.02572139890141637\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\
,\n \"acc_norm,none\": 0.12,\n \"acc_norm_stderr,none\": 0.020593600596839998\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" -\
\ leaderboard_bbh_snarks\",\n \"acc_norm,none\": 0.42134831460674155,\n\
\ \"acc_norm_stderr,none\": 0.03711441405960183\n },\n \
\ \"leaderboard_bbh_sports_understanding\": {\n \"alias\": \" - leaderboard_bbh_sports_understanding\"\
,\n \"acc_norm,none\": 0.596,\n \"acc_norm_stderr,none\":\
\ 0.03109668818482536\n },\n \"leaderboard_bbh_temporal_sequences\"\
: {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\",\n \
\ \"acc_norm,none\": 0.256,\n \"acc_norm_stderr,none\": 0.027657108718204846\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.212,\n \"acc_norm_stderr,none\":\
\ 0.025901884690541117\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.172,\n \"acc_norm_stderr,none\":\
\ 0.02391551394448624\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.332,\n \"acc_norm_stderr,none\":\
\ 0.029844039047465857\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2751677852348993,\n\
\ \"acc_norm_stderr,none\": 0.012951233546715653,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.2676767676767677,\n \"acc_norm_stderr,none\": 0.031544498882702825\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.2783882783882784,\n\
\ \"acc_norm_stderr,none\": 0.01919901779044858\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.27455357142857145,\n \"acc_norm_stderr,none\"\
: 0.021108747290633768\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.19408502772643252,\n \"prompt_level_strict_acc_stderr,none\": 0.01701938055074939,\n\
\ \"inst_level_strict_acc,none\": 0.31414868105515587,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.21072088724584104,\n \"prompt_level_loose_acc_stderr,none\": 0.017549801883664215,\n\
\ \"inst_level_loose_acc,none\": 0.3441247002398082,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.006042296072507553,\n \"exact_match_stderr,none\"\
: 0.0021277822944088735,\n \"alias\": \" - leaderboard_math_hard\"\n\
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.003257328990228013,\n\
\ \"exact_match_stderr,none\": 0.003257328990228013\n },\n \
\ \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\": \"\
\ - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.008130081300813009,\n \"exact_match_stderr,none\": 0.008130081300813007\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\"\
: \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\": 0.007575757575757576,\n\
\ \"exact_match_stderr,none\": 0.007575757575757577\n },\n \
\ \"leaderboard_math_intermediate_algebra_hard\": {\n \"alias\":\
\ \" - leaderboard_math_intermediate_algebra_hard\",\n \"exact_match,none\"\
: 0.0035714285714285713,\n \"exact_match_stderr,none\": 0.0035714285714285713\n\
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\"\
: \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\": 0.0,\n\
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_prealgebra_hard\",\n \
\ \"exact_match,none\": 0.02072538860103627,\n \"exact_match_stderr,none\"\
: 0.01028141701190903\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_mmlu_pro\": {\n \"alias\": \" - leaderboard_mmlu_pro\"\
,\n \"acc,none\": 0.1323969414893617,\n \"acc_stderr,none\"\
: 0.0030899300648995166\n },\n \"leaderboard_musr\": {\n \
\ \"acc_norm,none\": 0.38756613756613756,\n \"acc_norm_stderr,none\"\
: 0.017464908204620225,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"alias\":\
\ \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\": 0.508,\n\
\ \"acc_norm_stderr,none\": 0.03168215643141386\n },\n \
\ \"leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.30078125,\n \"acc_norm_stderr,none\"\
: 0.02871850463421181\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \
\ \"acc_norm,none\": 0.356,\n \"acc_norm_stderr,none\": 0.0303436806571532\n\
\ }\n },\n \"leaderboard\": {\n \"inst_level_strict_acc,none\"\
: 0.31414868105515587,\n \"inst_level_strict_acc_stderr,none\": \"N/A\",\n\
\ \"acc_norm,none\": 0.3280581138928525,\n \"acc_norm_stderr,none\"\
: 0.005089377844034893,\n \"exact_match,none\": 0.006042296072507553,\n \
\ \"exact_match_stderr,none\": 0.0021277822944088735,\n \"prompt_level_loose_acc,none\"\
: 0.21072088724584104,\n \"prompt_level_loose_acc_stderr,none\": 0.017549801883664215,\n\
\ \"inst_level_loose_acc,none\": 0.3441247002398082,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_strict_acc,none\": 0.19408502772643252,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.01701938055074939,\n \"\
acc,none\": 0.1323969414893617,\n \"acc_stderr,none\": 0.0030899300648995166,\n\
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \
\ \"acc_norm,none\": 0.3311925013018573,\n \"acc_norm_stderr,none\": 0.005826333297557237,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \"\
acc_norm,none\": 0.728,\n \"acc_norm_stderr,none\": 0.028200088296309975\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.5187165775401069,\n \"acc_norm_stderr,none\"\
: 0.03663608375537843\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.26,\n \"acc_norm_stderr,none\": 0.027797315752644335\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.356,\n \"acc_norm_stderr,none\": 0.0303436806571532\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.532,\n \"acc_norm_stderr,none\": 0.031621252575725574\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.088,\n \"acc_norm_stderr,none\": 0.017953084777052892\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.484,\n \"acc_norm_stderr,none\": 0.03166998503010743\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.192,\n \"acc_norm_stderr,none\": 0.024960691989171963\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.156,\n \"acc_norm_stderr,none\": 0.022995023034068682\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.352,\n \"acc_norm_stderr,none\": 0.030266288057359866\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.38,\n \"acc_norm_stderr,none\": 0.030760116042626098\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.42,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.328,\n \"acc_norm_stderr,none\": 0.029752391824475363\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.2191780821917808,\n\
\ \"acc_norm_stderr,none\": 0.03435504786264928\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.156,\n \"acc_norm_stderr,none\": 0.022995023034068682\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.208,\n \"acc_norm_stderr,none\": 0.02572139890141637\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.12,\n \"acc_norm_stderr,none\": 0.020593600596839998\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.42134831460674155,\n \"acc_norm_stderr,none\"\
: 0.03711441405960183\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.596,\n \"acc_norm_stderr,none\": 0.03109668818482536\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.256,\n \"acc_norm_stderr,none\": 0.027657108718204846\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.212,\n \"acc_norm_stderr,none\": 0.025901884690541117\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.172,\n \"acc_norm_stderr,none\": 0.02391551394448624\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.332,\n \"acc_norm_stderr,none\": 0.029844039047465857\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2751677852348993,\n\
\ \"acc_norm_stderr,none\": 0.012951233546715653,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.2676767676767677,\n\
\ \"acc_norm_stderr,none\": 0.031544498882702825\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.2783882783882784,\n \"acc_norm_stderr,none\": 0.01919901779044858\n \
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.27455357142857145,\n \"acc_norm_stderr,none\"\
: 0.021108747290633768\n },\n \"leaderboard_ifeval\": {\n \"alias\"\
: \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.19408502772643252,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.01701938055074939,\n \
\ \"inst_level_strict_acc,none\": 0.31414868105515587,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.21072088724584104,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.017549801883664215,\n \"inst_level_loose_acc,none\"\
: 0.3441247002398082,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n \
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.006042296072507553,\n\
\ \"exact_match_stderr,none\": 0.0021277822944088735,\n \"alias\"\
: \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\": {\n\
\ \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.003257328990228013,\n \"exact_match_stderr,none\": 0.003257328990228013\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.008130081300813009,\n \"exact_match_stderr,none\": 0.008130081300813007\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\": \" - leaderboard_math_geometry_hard\"\
,\n \"exact_match,none\": 0.007575757575757576,\n \"exact_match_stderr,none\"\
: 0.007575757575757577\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.0035714285714285713,\n \"exact_match_stderr,none\"\
: 0.0035714285714285713\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_prealgebra_hard\",\n \"exact_match,none\"\
: 0.02072538860103627,\n \"exact_match_stderr,none\": 0.01028141701190903\n\
\ },\n \"leaderboard_math_precalculus_hard\": {\n \"alias\": \" -\
\ leaderboard_math_precalculus_hard\",\n \"exact_match,none\": 0.0,\n \
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_mmlu_pro\": {\n\
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.1323969414893617,\n\
\ \"acc_stderr,none\": 0.0030899300648995166\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.38756613756613756,\n \"acc_norm_stderr,none\"\
: 0.017464908204620225,\n \"alias\": \" - leaderboard_musr\"\n },\n \
\ \"leaderboard_musr_murder_mysteries\": {\n \"alias\": \" - leaderboard_musr_murder_mysteries\"\
,\n \"acc_norm,none\": 0.508,\n \"acc_norm_stderr,none\": 0.03168215643141386\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\": \" -\
\ leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.30078125,\n\
\ \"acc_norm_stderr,none\": 0.02871850463421181\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \"acc_norm,none\"\
: 0.356,\n \"acc_norm_stderr,none\": 0.0303436806571532\n }\n}\n```"
repo_url: https://huggingface.co/LeroyDyer/SpydazWeb_AI_HumanAI_RP
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_bbh_navigate
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_bbh_snarks
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_gpqa_extended
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_gpqa_main
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_gpqa_main_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_ifeval
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_ifeval_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_mmlu_pro
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_musr_object_placements
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-19T23-36-45.486047.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_19T23_36_45.486047
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-19T23-36-45.486047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-19T23-36-45.486047.jsonl'
---
# Dataset Card for Evaluation run of LeroyDyer/SpydazWeb_AI_HumanAI_RP
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [LeroyDyer/SpydazWeb_AI_HumanAI_RP](https://huggingface.co/LeroyDyer/SpydazWeb_AI_HumanAI_RP)
The dataset is composed of 38 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run. Each run can be found as a specific split in each configuration, the split being named after the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can, for instance, do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/LeroyDyer__SpydazWeb_AI_HumanAI_RP-details",
name="LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
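A minimal sketch, assuming only the standard `datasets` API: list the per-task configurations of this repository, then load one of them at the timestamped split shown in the configs above instead of `latest`. The config and split names below are copied from this card; the aggregated "results" configuration mentioned above is not listed in the configs section, so listing configurations first is the safe way to find its exact name.
```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/LeroyDyer__SpydazWeb_AI_HumanAI_RP-details"

# List the per-task configurations stored in this repository.
configs = get_dataset_config_names(repo)
print(len(configs), configs[:3])

# Load one configuration at a specific timestamped run instead of "latest".
# The split name is the run timestamp written with underscores, as shown in
# the configs section of this card.
ifeval_run = load_dataset(
    repo,
    name="LeroyDyer__SpydazWeb_AI_HumanAI_RP__leaderboard_ifeval",
    split="2024_11_19T23_36_45.486047",
)
print(ifeval_run)
```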
## Latest results
These are the [latest results from run 2024-11-19T23-36-45.486047](https://huggingface.co/datasets/open-llm-leaderboard/LeroyDyer__SpydazWeb_AI_HumanAI_RP-details/blob/main/LeroyDyer__SpydazWeb_AI_HumanAI_RP/results_2024-11-19T23-36-45.486047.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in its results and in the "latest" split for each eval):
```python
{
"all": {
"leaderboard": {
"inst_level_strict_acc,none": 0.31414868105515587,
"inst_level_strict_acc_stderr,none": "N/A",
"acc_norm,none": 0.3280581138928525,
"acc_norm_stderr,none": 0.005089377844034893,
"exact_match,none": 0.006042296072507553,
"exact_match_stderr,none": 0.0021277822944088735,
"prompt_level_loose_acc,none": 0.21072088724584104,
"prompt_level_loose_acc_stderr,none": 0.017549801883664215,
"inst_level_loose_acc,none": 0.3441247002398082,
"inst_level_loose_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.19408502772643252,
"prompt_level_strict_acc_stderr,none": 0.01701938055074939,
"acc,none": 0.1323969414893617,
"acc_stderr,none": 0.0030899300648995166,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.3311925013018573,
"acc_norm_stderr,none": 0.005826333297557237,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.728,
"acc_norm_stderr,none": 0.028200088296309975
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5187165775401069,
"acc_norm_stderr,none": 0.03663608375537843
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.26,
"acc_norm_stderr,none": 0.027797315752644335
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.356,
"acc_norm_stderr,none": 0.0303436806571532
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.532,
"acc_norm_stderr,none": 0.031621252575725574
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.088,
"acc_norm_stderr,none": 0.017953084777052892
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.484,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.192,
"acc_norm_stderr,none": 0.024960691989171963
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.156,
"acc_norm_stderr,none": 0.022995023034068682
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.352,
"acc_norm_stderr,none": 0.030266288057359866
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.38,
"acc_norm_stderr,none": 0.030760116042626098
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.328,
"acc_norm_stderr,none": 0.029752391824475363
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.2191780821917808,
"acc_norm_stderr,none": 0.03435504786264928
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.156,
"acc_norm_stderr,none": 0.022995023034068682
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.208,
"acc_norm_stderr,none": 0.02572139890141637
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.12,
"acc_norm_stderr,none": 0.020593600596839998
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.42134831460674155,
"acc_norm_stderr,none": 0.03711441405960183
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.596,
"acc_norm_stderr,none": 0.03109668818482536
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.256,
"acc_norm_stderr,none": 0.027657108718204846
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.212,
"acc_norm_stderr,none": 0.025901884690541117
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.172,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.332,
"acc_norm_stderr,none": 0.029844039047465857
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2751677852348993,
"acc_norm_stderr,none": 0.012951233546715653,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2676767676767677,
"acc_norm_stderr,none": 0.031544498882702825
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.2783882783882784,
"acc_norm_stderr,none": 0.01919901779044858
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.27455357142857145,
"acc_norm_stderr,none": 0.021108747290633768
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.19408502772643252,
"prompt_level_strict_acc_stderr,none": 0.01701938055074939,
"inst_level_strict_acc,none": 0.31414868105515587,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.21072088724584104,
"prompt_level_loose_acc_stderr,none": 0.017549801883664215,
"inst_level_loose_acc,none": 0.3441247002398082,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.006042296072507553,
"exact_match_stderr,none": 0.0021277822944088735,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.003257328990228013,
"exact_match_stderr,none": 0.003257328990228013
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.008130081300813009,
"exact_match_stderr,none": 0.008130081300813007
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.007575757575757576,
"exact_match_stderr,none": 0.007575757575757577
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0035714285714285713,
"exact_match_stderr,none": 0.0035714285714285713
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.02072538860103627,
"exact_match_stderr,none": 0.01028141701190903
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.1323969414893617,
"acc_stderr,none": 0.0030899300648995166
},
"leaderboard_musr": {
"acc_norm,none": 0.38756613756613756,
"acc_norm_stderr,none": 0.017464908204620225,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.30078125,
"acc_norm_stderr,none": 0.02871850463421181
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.356,
"acc_norm_stderr,none": 0.0303436806571532
}
},
"leaderboard": {
"inst_level_strict_acc,none": 0.31414868105515587,
"inst_level_strict_acc_stderr,none": "N/A",
"acc_norm,none": 0.3280581138928525,
"acc_norm_stderr,none": 0.005089377844034893,
"exact_match,none": 0.006042296072507553,
"exact_match_stderr,none": 0.0021277822944088735,
"prompt_level_loose_acc,none": 0.21072088724584104,
"prompt_level_loose_acc_stderr,none": 0.017549801883664215,
"inst_level_loose_acc,none": 0.3441247002398082,
"inst_level_loose_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.19408502772643252,
"prompt_level_strict_acc_stderr,none": 0.01701938055074939,
"acc,none": 0.1323969414893617,
"acc_stderr,none": 0.0030899300648995166,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.3311925013018573,
"acc_norm_stderr,none": 0.005826333297557237,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.728,
"acc_norm_stderr,none": 0.028200088296309975
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5187165775401069,
"acc_norm_stderr,none": 0.03663608375537843
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.26,
"acc_norm_stderr,none": 0.027797315752644335
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.356,
"acc_norm_stderr,none": 0.0303436806571532
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.532,
"acc_norm_stderr,none": 0.031621252575725574
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.088,
"acc_norm_stderr,none": 0.017953084777052892
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.484,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.192,
"acc_norm_stderr,none": 0.024960691989171963
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.156,
"acc_norm_stderr,none": 0.022995023034068682
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.352,
"acc_norm_stderr,none": 0.030266288057359866
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.38,
"acc_norm_stderr,none": 0.030760116042626098
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.328,
"acc_norm_stderr,none": 0.029752391824475363
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.2191780821917808,
"acc_norm_stderr,none": 0.03435504786264928
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.156,
"acc_norm_stderr,none": 0.022995023034068682
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.208,
"acc_norm_stderr,none": 0.02572139890141637
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.12,
"acc_norm_stderr,none": 0.020593600596839998
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.42134831460674155,
"acc_norm_stderr,none": 0.03711441405960183
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.596,
"acc_norm_stderr,none": 0.03109668818482536
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.256,
"acc_norm_stderr,none": 0.027657108718204846
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.212,
"acc_norm_stderr,none": 0.025901884690541117
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.172,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.332,
"acc_norm_stderr,none": 0.029844039047465857
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2751677852348993,
"acc_norm_stderr,none": 0.012951233546715653,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2676767676767677,
"acc_norm_stderr,none": 0.031544498882702825
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.2783882783882784,
"acc_norm_stderr,none": 0.01919901779044858
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.27455357142857145,
"acc_norm_stderr,none": 0.021108747290633768
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.19408502772643252,
"prompt_level_strict_acc_stderr,none": 0.01701938055074939,
"inst_level_strict_acc,none": 0.31414868105515587,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.21072088724584104,
"prompt_level_loose_acc_stderr,none": 0.017549801883664215,
"inst_level_loose_acc,none": 0.3441247002398082,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.006042296072507553,
"exact_match_stderr,none": 0.0021277822944088735,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.003257328990228013,
"exact_match_stderr,none": 0.003257328990228013
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.008130081300813009,
"exact_match_stderr,none": 0.008130081300813007
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.007575757575757576,
"exact_match_stderr,none": 0.007575757575757577
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0035714285714285713,
"exact_match_stderr,none": 0.0035714285714285713
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.02072538860103627,
"exact_match_stderr,none": 0.01028141701190903
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.1323969414893617,
"acc_stderr,none": 0.0030899300648995166
},
"leaderboard_musr": {
"acc_norm,none": 0.38756613756613756,
"acc_norm_stderr,none": 0.017464908204620225,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.30078125,
"acc_norm_stderr,none": 0.02871850463421181
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.356,
"acc_norm_stderr,none": 0.0303436806571532
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/LeroyDyer__SpydazWeb_AI_HumanAI_TextVision-details | open-llm-leaderboard | "2024-11-19T23:40:24Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T23:37:18Z" | ---
pretty_name: Evaluation run of LeroyDyer/SpydazWeb_AI_HumanAI_TextVision
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [LeroyDyer/SpydazWeb_AI_HumanAI_TextVision](https://huggingface.co/LeroyDyer/SpydazWeb_AI_HumanAI_TextVision)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/LeroyDyer__SpydazWeb_AI_HumanAI_TextVision-details\"\
,\n\tname=\"LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-11-19T23-37-17.474204](https://huggingface.co/datasets/open-llm-leaderboard/LeroyDyer__SpydazWeb_AI_HumanAI_TextVision-details/blob/main/LeroyDyer__SpydazWeb_AI_HumanAI_TextVision/results_2024-11-19T23-37-17.474204.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"prompt_level_strict_acc,none\": 0.23844731977818853,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.01833788809424391,\n \"\
acc_norm,none\": 0.3316902321961344,\n \"acc_norm_stderr,none\": 0.005074965442699723,\n\
\ \"acc,none\": 0.13871343085106383,\n \"acc_stderr,none\"\
: 0.0031512454438591977,\n \"inst_level_strict_acc,none\": 0.37410071942446044,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.26247689463955637,\n \"prompt_level_loose_acc_stderr,none\": 0.0189337428760446,\n\
\ \"exact_match,none\": 0.005287009063444109,\n \"exact_match_stderr,none\"\
: 0.0019938227617985627,\n \"inst_level_loose_acc,none\": 0.3980815347721823,\n\
\ \"inst_level_loose_acc_stderr,none\": \"N/A\",\n \"alias\"\
: \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \"acc_norm,none\"\
: 0.3318868252039576,\n \"acc_norm_stderr,none\": 0.005789900896595169,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \
\ \"acc_norm,none\": 0.672,\n \"acc_norm_stderr,none\": 0.029752391824475363\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\"\
: \" - leaderboard_bbh_causal_judgement\",\n \"acc_norm,none\": 0.5187165775401069,\n\
\ \"acc_norm_stderr,none\": 0.03663608375537843\n },\n \
\ \"leaderboard_bbh_date_understanding\": {\n \"alias\": \" - leaderboard_bbh_date_understanding\"\
,\n \"acc_norm,none\": 0.252,\n \"acc_norm_stderr,none\":\
\ 0.027513851933031318\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\",\n \
\ \"acc_norm,none\": 0.432,\n \"acc_norm_stderr,none\": 0.03139181076542942\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\"\
: \" - leaderboard_bbh_formal_fallacies\",\n \"acc_norm,none\": 0.52,\n\
\ \"acc_norm_stderr,none\": 0.03166085340849512\n },\n \
\ \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.04,\n \"acc_norm_stderr,none\": 0.012418408411301325\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \"\
\ - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\": 0.552,\n \
\ \"acc_norm_stderr,none\": 0.03151438761115348\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
,\n \"acc_norm,none\": 0.204,\n \"acc_norm_stderr,none\":\
\ 0.025537121574548162\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.124,\n \"acc_norm_stderr,none\":\
\ 0.020886382258673272\n },\n \"leaderboard_bbh_logical_deduction_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\
,\n \"acc_norm,none\": 0.352,\n \"acc_norm_stderr,none\":\
\ 0.030266288057359866\n },\n \"leaderboard_bbh_movie_recommendation\"\
: {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\",\n \
\ \"acc_norm,none\": 0.448,\n \"acc_norm_stderr,none\": 0.03151438761115349\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \"\
\ - leaderboard_bbh_navigate\",\n \"acc_norm,none\": 0.42,\n \
\ \"acc_norm_stderr,none\": 0.03127799950463661\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"alias\": \" - leaderboard_bbh_object_counting\",\n \
\ \"acc_norm,none\": 0.332,\n \"acc_norm_stderr,none\": 0.029844039047465857\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
alias\": \" - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\"\
: 0.2602739726027397,\n \"acc_norm_stderr,none\": 0.03643903096750157\n\
\ },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\",\n\
\ \"acc_norm,none\": 0.148,\n \"acc_norm_stderr,none\": 0.022503547243806186\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\": 0.152,\n \
\ \"acc_norm_stderr,none\": 0.022752024491765464\n },\n \"\
leaderboard_bbh_salient_translation_error_detection\": {\n \"alias\"\
: \" - leaderboard_bbh_salient_translation_error_detection\",\n \"acc_norm,none\"\
: 0.12,\n \"acc_norm_stderr,none\": 0.020593600596839998\n },\n\
\ \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.5617977528089888,\n \"acc_norm_stderr,none\"\
: 0.03729414592947274\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \
\ \"acc_norm,none\": 0.524,\n \"acc_norm_stderr,none\": 0.03164968895968774\n\
\ },\n \"leaderboard_bbh_temporal_sequences\": {\n \"alias\"\
: \" - leaderboard_bbh_temporal_sequences\",\n \"acc_norm,none\": 0.24,\n\
\ \"acc_norm_stderr,none\": 0.027065293652238982\n },\n \
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \"\
alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\",\n \
\ \"acc_norm,none\": 0.196,\n \"acc_norm_stderr,none\": 0.025156857313255926\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.16,\n \"acc_norm_stderr,none\": 0.023232714782060626\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.332,\n \"acc_norm_stderr,none\":\
\ 0.029844039047465857\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.29194630872483224,\n\
\ \"acc_norm_stderr,none\": 0.01316865684652682,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.2727272727272727,\n \"acc_norm_stderr,none\": 0.03173071239071728\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.31684981684981683,\n\
\ \"acc_norm_stderr,none\": 0.019929048938214563\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.2700892857142857,\n \"acc_norm_stderr,none\"\
: 0.021000749078822437\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.23844731977818853,\n \"prompt_level_strict_acc_stderr,none\": 0.01833788809424391,\n\
\ \"inst_level_strict_acc,none\": 0.37410071942446044,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.26247689463955637,\n \"prompt_level_loose_acc_stderr,none\": 0.0189337428760446,\n\
\ \"inst_level_loose_acc,none\": 0.3980815347721823,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.005287009063444109,\n \"exact_match_stderr,none\"\
: 0.0019938227617985627,\n \"alias\": \" - leaderboard_math_hard\"\n\
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.003257328990228013,\n\
\ \"exact_match_stderr,none\": 0.003257328990228013\n },\n \
\ \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\": \"\
\ - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_geometry_hard\"\
: {\n \"alias\": \" - leaderboard_math_geometry_hard\",\n \
\ \"exact_match,none\": 0.015151515151515152,\n \"exact_match_stderr,none\"\
: 0.01067276863717474\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n\
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.006493506493506494,\n \"exact_match_stderr,none\": 0.006493506493506494\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_prealgebra_hard\",\n \"exact_match,none\": 0.010362694300518135,\n\
\ \"exact_match_stderr,none\": 0.007308424386792209\n },\n \
\ \"leaderboard_math_precalculus_hard\": {\n \"alias\": \" - leaderboard_math_precalculus_hard\"\
,\n \"exact_match,none\": 0.007407407407407408,\n \"exact_match_stderr,none\"\
: 0.007407407407407408\n },\n \"leaderboard_mmlu_pro\": {\n \
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.13871343085106383,\n\
\ \"acc_stderr,none\": 0.0031512454438591977\n },\n \"\
leaderboard_musr\": {\n \"acc_norm,none\": 0.39285714285714285,\n \
\ \"acc_norm_stderr,none\": 0.01732689349143748,\n \"alias\"\
: \" - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\"\
: {\n \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \
\ \"acc_norm,none\": 0.54,\n \"acc_norm_stderr,none\": 0.031584653891499004\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\"\
: \" - leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.26953125,\n\
\ \"acc_norm_stderr,none\": 0.02778659256840428\n },\n \
\ \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.372,\n \"acc_norm_stderr,none\":\
\ 0.03063032594455827\n }\n },\n \"leaderboard\": {\n \"prompt_level_strict_acc,none\"\
: 0.23844731977818853,\n \"prompt_level_strict_acc_stderr,none\": 0.01833788809424391,\n\
\ \"acc_norm,none\": 0.3316902321961344,\n \"acc_norm_stderr,none\"\
: 0.005074965442699723,\n \"acc,none\": 0.13871343085106383,\n \"\
acc_stderr,none\": 0.0031512454438591977,\n \"inst_level_strict_acc,none\"\
: 0.37410071942446044,\n \"inst_level_strict_acc_stderr,none\": \"N/A\",\n\
\ \"prompt_level_loose_acc,none\": 0.26247689463955637,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.0189337428760446,\n \"exact_match,none\": 0.005287009063444109,\n \
\ \"exact_match_stderr,none\": 0.0019938227617985627,\n \"inst_level_loose_acc,none\"\
: 0.3980815347721823,\n \"inst_level_loose_acc_stderr,none\": \"N/A\",\n\
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \
\ \"acc_norm,none\": 0.3318868252039576,\n \"acc_norm_stderr,none\": 0.005789900896595169,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \"\
acc_norm,none\": 0.672,\n \"acc_norm_stderr,none\": 0.029752391824475363\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.5187165775401069,\n \"acc_norm_stderr,none\"\
: 0.03663608375537843\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.252,\n \"acc_norm_stderr,none\": 0.027513851933031318\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.432,\n \"acc_norm_stderr,none\": 0.03139181076542942\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.52,\n \"acc_norm_stderr,none\": 0.03166085340849512\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.04,\n \"acc_norm_stderr,none\": 0.012418408411301325\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.552,\n \"acc_norm_stderr,none\": 0.03151438761115348\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.204,\n \"acc_norm_stderr,none\": 0.025537121574548162\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.124,\n \"acc_norm_stderr,none\": 0.020886382258673272\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.352,\n \"acc_norm_stderr,none\": 0.030266288057359866\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.448,\n \"acc_norm_stderr,none\": 0.03151438761115349\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.42,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.332,\n \"acc_norm_stderr,none\": 0.029844039047465857\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.2602739726027397,\n\
\ \"acc_norm_stderr,none\": 0.03643903096750157\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.148,\n \"acc_norm_stderr,none\": 0.022503547243806186\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.152,\n \"acc_norm_stderr,none\": 0.022752024491765464\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.12,\n \"acc_norm_stderr,none\": 0.020593600596839998\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.5617977528089888,\n \"acc_norm_stderr,none\"\
: 0.03729414592947274\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.524,\n \"acc_norm_stderr,none\": 0.03164968895968774\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.24,\n \"acc_norm_stderr,none\": 0.027065293652238982\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.196,\n \"acc_norm_stderr,none\": 0.025156857313255926\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.16,\n \"acc_norm_stderr,none\": 0.023232714782060626\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.332,\n \"acc_norm_stderr,none\": 0.029844039047465857\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.29194630872483224,\n\
\ \"acc_norm_stderr,none\": 0.01316865684652682,\n \"alias\": \" -\
\ leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"alias\"\
: \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.2727272727272727,\n\
\ \"acc_norm_stderr,none\": 0.03173071239071728\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.31684981684981683,\n \"acc_norm_stderr,none\": 0.019929048938214563\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.2700892857142857,\n \"acc_norm_stderr,none\"\
: 0.021000749078822437\n },\n \"leaderboard_ifeval\": {\n \"alias\"\
: \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.23844731977818853,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.01833788809424391,\n \
\ \"inst_level_strict_acc,none\": 0.37410071942446044,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.26247689463955637,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.0189337428760446,\n \"inst_level_loose_acc,none\"\
: 0.3980815347721823,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n \
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.005287009063444109,\n\
\ \"exact_match_stderr,none\": 0.0019938227617985627,\n \"alias\"\
: \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\": {\n\
\ \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.003257328990228013,\n \"exact_match_stderr,none\": 0.003257328990228013\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_geometry_hard\"\
: {\n \"alias\": \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\"\
: 0.015151515151515152,\n \"exact_match_stderr,none\": 0.01067276863717474\n\
\ },\n \"leaderboard_math_intermediate_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_intermediate_algebra_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_num_theory_hard\"\
: {\n \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.006493506493506494,\n \"exact_match_stderr,none\": 0.006493506493506494\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.010362694300518135,\n \"exact_match_stderr,none\"\
: 0.007308424386792209\n },\n \"leaderboard_math_precalculus_hard\": {\n \
\ \"alias\": \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\"\
: 0.007407407407407408,\n \"exact_match_stderr,none\": 0.007407407407407408\n\
\ },\n \"leaderboard_mmlu_pro\": {\n \"alias\": \" - leaderboard_mmlu_pro\"\
,\n \"acc,none\": 0.13871343085106383,\n \"acc_stderr,none\": 0.0031512454438591977\n\
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.39285714285714285,\n\
\ \"acc_norm_stderr,none\": 0.01732689349143748,\n \"alias\": \" -\
\ leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\": {\n \
\ \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\"\
: 0.54,\n \"acc_norm_stderr,none\": 0.031584653891499004\n },\n \"\
leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.26953125,\n \"acc_norm_stderr,none\": 0.02778659256840428\n\
\ },\n \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.372,\n \"acc_norm_stderr,none\": 0.03063032594455827\n\
\ }\n}\n```"
repo_url: https://huggingface.co/LeroyDyer/SpydazWeb_AI_HumanAI_TextVision
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_bbh_navigate
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_bbh_snarks
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_gpqa_extended
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_gpqa_main
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_gpqa_main_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_ifeval
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_ifeval_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_mmlu_pro
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_musr_object_placements
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-19T23-37-17.474204.jsonl'
- config_name: LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_19T23_37_17.474204
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-19T23-37-17.474204.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-19T23-37-17.474204.jsonl'
---
# Dataset Card for Evaluation run of LeroyDyer/SpydazWeb_AI_HumanAI_TextVision
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [LeroyDyer/SpydazWeb_AI_HumanAI_TextVision](https://huggingface.co/LeroyDyer/SpydazWeb_AI_HumanAI_TextVision)
The dataset is composed of 38 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/LeroyDyer__SpydazWeb_AI_HumanAI_TextVision-details",
name="LeroyDyer__SpydazWeb_AI_HumanAI_TextVision__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
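The aggregated results mentioned above can be loaded the same way. Note that the exact configuration name for the aggregated results is not listed in the YAML configs of this card, so the `name` used in this minimal sketch is an assumption:
```python
from datasets import load_dataset

# Hypothetical sketch: the card mentions an aggregated "results" configuration,
# but it does not appear in the YAML configs above, so the config name below is
# an assumption (it may also be prefixed with the model name).
results = load_dataset(
    "open-llm-leaderboard/LeroyDyer__SpydazWeb_AI_HumanAI_TextVision-details",
    name="results",
    split="latest",
)
print(results[0])
```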
## Latest results
These are the [latest results from run 2024-11-19T23-37-17.474204](https://huggingface.co/datasets/open-llm-leaderboard/LeroyDyer__SpydazWeb_AI_HumanAI_TextVision-details/blob/main/LeroyDyer__SpydazWeb_AI_HumanAI_TextVision/results_2024-11-19T23-37-17.474204.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each of them in the results and in the "latest" split for each eval):
```python
{
"all": {
"leaderboard": {
"prompt_level_strict_acc,none": 0.23844731977818853,
"prompt_level_strict_acc_stderr,none": 0.01833788809424391,
"acc_norm,none": 0.3316902321961344,
"acc_norm_stderr,none": 0.005074965442699723,
"acc,none": 0.13871343085106383,
"acc_stderr,none": 0.0031512454438591977,
"inst_level_strict_acc,none": 0.37410071942446044,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.26247689463955637,
"prompt_level_loose_acc_stderr,none": 0.0189337428760446,
"exact_match,none": 0.005287009063444109,
"exact_match_stderr,none": 0.0019938227617985627,
"inst_level_loose_acc,none": 0.3980815347721823,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.3318868252039576,
"acc_norm_stderr,none": 0.005789900896595169,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.672,
"acc_norm_stderr,none": 0.029752391824475363
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5187165775401069,
"acc_norm_stderr,none": 0.03663608375537843
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.252,
"acc_norm_stderr,none": 0.027513851933031318
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.432,
"acc_norm_stderr,none": 0.03139181076542942
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.52,
"acc_norm_stderr,none": 0.03166085340849512
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.04,
"acc_norm_stderr,none": 0.012418408411301325
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.03151438761115348
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.204,
"acc_norm_stderr,none": 0.025537121574548162
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.124,
"acc_norm_stderr,none": 0.020886382258673272
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.352,
"acc_norm_stderr,none": 0.030266288057359866
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.448,
"acc_norm_stderr,none": 0.03151438761115349
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.332,
"acc_norm_stderr,none": 0.029844039047465857
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.2602739726027397,
"acc_norm_stderr,none": 0.03643903096750157
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.148,
"acc_norm_stderr,none": 0.022503547243806186
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.152,
"acc_norm_stderr,none": 0.022752024491765464
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.12,
"acc_norm_stderr,none": 0.020593600596839998
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.5617977528089888,
"acc_norm_stderr,none": 0.03729414592947274
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.524,
"acc_norm_stderr,none": 0.03164968895968774
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.24,
"acc_norm_stderr,none": 0.027065293652238982
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.196,
"acc_norm_stderr,none": 0.025156857313255926
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.16,
"acc_norm_stderr,none": 0.023232714782060626
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.332,
"acc_norm_stderr,none": 0.029844039047465857
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.29194630872483224,
"acc_norm_stderr,none": 0.01316865684652682,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2727272727272727,
"acc_norm_stderr,none": 0.03173071239071728
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.31684981684981683,
"acc_norm_stderr,none": 0.019929048938214563
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.2700892857142857,
"acc_norm_stderr,none": 0.021000749078822437
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.23844731977818853,
"prompt_level_strict_acc_stderr,none": 0.01833788809424391,
"inst_level_strict_acc,none": 0.37410071942446044,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.26247689463955637,
"prompt_level_loose_acc_stderr,none": 0.0189337428760446,
"inst_level_loose_acc,none": 0.3980815347721823,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.005287009063444109,
"exact_match_stderr,none": 0.0019938227617985627,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.003257328990228013,
"exact_match_stderr,none": 0.003257328990228013
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.015151515151515152,
"exact_match_stderr,none": 0.01067276863717474
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.006493506493506494,
"exact_match_stderr,none": 0.006493506493506494
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.010362694300518135,
"exact_match_stderr,none": 0.007308424386792209
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.007407407407407408,
"exact_match_stderr,none": 0.007407407407407408
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.13871343085106383,
"acc_stderr,none": 0.0031512454438591977
},
"leaderboard_musr": {
"acc_norm,none": 0.39285714285714285,
"acc_norm_stderr,none": 0.01732689349143748,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.54,
"acc_norm_stderr,none": 0.031584653891499004
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.26953125,
"acc_norm_stderr,none": 0.02778659256840428
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.372,
"acc_norm_stderr,none": 0.03063032594455827
}
},
"leaderboard": {
"prompt_level_strict_acc,none": 0.23844731977818853,
"prompt_level_strict_acc_stderr,none": 0.01833788809424391,
"acc_norm,none": 0.3316902321961344,
"acc_norm_stderr,none": 0.005074965442699723,
"acc,none": 0.13871343085106383,
"acc_stderr,none": 0.0031512454438591977,
"inst_level_strict_acc,none": 0.37410071942446044,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.26247689463955637,
"prompt_level_loose_acc_stderr,none": 0.0189337428760446,
"exact_match,none": 0.005287009063444109,
"exact_match_stderr,none": 0.0019938227617985627,
"inst_level_loose_acc,none": 0.3980815347721823,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.3318868252039576,
"acc_norm_stderr,none": 0.005789900896595169,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.672,
"acc_norm_stderr,none": 0.029752391824475363
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5187165775401069,
"acc_norm_stderr,none": 0.03663608375537843
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.252,
"acc_norm_stderr,none": 0.027513851933031318
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.432,
"acc_norm_stderr,none": 0.03139181076542942
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.52,
"acc_norm_stderr,none": 0.03166085340849512
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.04,
"acc_norm_stderr,none": 0.012418408411301325
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.03151438761115348
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.204,
"acc_norm_stderr,none": 0.025537121574548162
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.124,
"acc_norm_stderr,none": 0.020886382258673272
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.352,
"acc_norm_stderr,none": 0.030266288057359866
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.448,
"acc_norm_stderr,none": 0.03151438761115349
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.332,
"acc_norm_stderr,none": 0.029844039047465857
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.2602739726027397,
"acc_norm_stderr,none": 0.03643903096750157
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.148,
"acc_norm_stderr,none": 0.022503547243806186
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.152,
"acc_norm_stderr,none": 0.022752024491765464
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.12,
"acc_norm_stderr,none": 0.020593600596839998
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.5617977528089888,
"acc_norm_stderr,none": 0.03729414592947274
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.524,
"acc_norm_stderr,none": 0.03164968895968774
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.24,
"acc_norm_stderr,none": 0.027065293652238982
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.196,
"acc_norm_stderr,none": 0.025156857313255926
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.16,
"acc_norm_stderr,none": 0.023232714782060626
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.332,
"acc_norm_stderr,none": 0.029844039047465857
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.29194630872483224,
"acc_norm_stderr,none": 0.01316865684652682,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2727272727272727,
"acc_norm_stderr,none": 0.03173071239071728
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.31684981684981683,
"acc_norm_stderr,none": 0.019929048938214563
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.2700892857142857,
"acc_norm_stderr,none": 0.021000749078822437
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.23844731977818853,
"prompt_level_strict_acc_stderr,none": 0.01833788809424391,
"inst_level_strict_acc,none": 0.37410071942446044,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.26247689463955637,
"prompt_level_loose_acc_stderr,none": 0.0189337428760446,
"inst_level_loose_acc,none": 0.3980815347721823,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.005287009063444109,
"exact_match_stderr,none": 0.0019938227617985627,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.003257328990228013,
"exact_match_stderr,none": 0.003257328990228013
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.015151515151515152,
"exact_match_stderr,none": 0.01067276863717474
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.006493506493506494,
"exact_match_stderr,none": 0.006493506493506494
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.010362694300518135,
"exact_match_stderr,none": 0.007308424386792209
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.007407407407407408,
"exact_match_stderr,none": 0.007407407407407408
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.13871343085106383,
"acc_stderr,none": 0.0031512454438591977
},
"leaderboard_musr": {
"acc_norm,none": 0.39285714285714285,
"acc_norm_stderr,none": 0.01732689349143748,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.54,
"acc_norm_stderr,none": 0.031584653891499004
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.26953125,
"acc_norm_stderr,none": 0.02778659256840428
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.372,
"acc_norm_stderr,none": 0.03063032594455827
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/LeroyDyer__SpydazWebAI_Human_AGI_001-details | open-llm-leaderboard | "2024-11-19T23:41:00Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T23:37:53Z" | ---
pretty_name: Evaluation run of LeroyDyer/SpydazWebAI_Human_AGI_001
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [LeroyDyer/SpydazWebAI_Human_AGI_001](https://huggingface.co/LeroyDyer/SpydazWebAI_Human_AGI_001)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/LeroyDyer__SpydazWebAI_Human_AGI_001-details\"\
,\n\tname=\"LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-11-19T23-37-52.829059](https://huggingface.co/datasets/open-llm-leaderboard/LeroyDyer__SpydazWebAI_Human_AGI_001-details/blob/main/LeroyDyer__SpydazWebAI_Human_AGI_001/results_2024-11-19T23-37-52.829059.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"inst_level_loose_acc,none\": 0.4136690647482014,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\",\n \"acc,none\": 0.14261968085106383,\n\
\ \"acc_stderr,none\": 0.003188053654084568,\n \"exact_match,none\"\
: 0.014350453172205438,\n \"exact_match_stderr,none\": 0.0032674983588001287,\n\
\ \"prompt_level_strict_acc,none\": 0.24953789279112754,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.018622404509805804,\n \"\
prompt_level_loose_acc,none\": 0.2902033271719039,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.019530856691222623,\n \"inst_level_strict_acc,none\": 0.37410071942446044,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"acc_norm,none\"\
: 0.3401219354001816,\n \"acc_norm_stderr,none\": 0.0051217073191174856,\n\
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.34108661690678704,\n \"acc_norm_stderr,none\"\
: 0.005853610410917754,\n \"alias\": \" - leaderboard_bbh\"\n \
\ },\n \"leaderboard_bbh_boolean_expressions\": {\n \"alias\"\
: \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\": 0.688,\n\
\ \"acc_norm_stderr,none\": 0.029361067575219852\n },\n \
\ \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.5187165775401069,\n \"acc_norm_stderr,none\"\
: 0.03663608375537843\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_date_understanding\",\n \
\ \"acc_norm,none\": 0.308,\n \"acc_norm_stderr,none\": 0.02925692860650181\n\
\ },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\": 0.564,\n\
\ \"acc_norm_stderr,none\": 0.03142556706028136\n },\n \
\ \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.536,\n \"acc_norm_stderr,none\":\
\ 0.031603975145223735\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\",\n \
\ \"acc_norm,none\": 0.156,\n \"acc_norm_stderr,none\": 0.022995023034068682\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \"\
\ - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\": 0.544,\n \
\ \"acc_norm_stderr,none\": 0.031563285061213475\n },\n \"\
leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\": \" \
\ - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.196,\n \"acc_norm_stderr,none\": 0.025156857313255926\n },\n\
\ \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\",\n \"\
acc_norm,none\": 0.128,\n \"acc_norm_stderr,none\": 0.021172081336336534\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n\
\ \"acc_norm,none\": 0.348,\n \"acc_norm_stderr,none\": 0.030186568464511673\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"acc_norm,none\"\
: 0.464,\n \"acc_norm_stderr,none\": 0.03160397514522374\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.42,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\"\
: \" - leaderboard_bbh_object_counting\",\n \"acc_norm,none\": 0.244,\n\
\ \"acc_norm_stderr,none\": 0.02721799546455311\n },\n \
\ \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\
,\n \"acc_norm,none\": 0.2808219178082192,\n \"acc_norm_stderr,none\"\
: 0.037320694849458984\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.172,\n \"acc_norm_stderr,none\":\
\ 0.02391551394448624\n },\n \"leaderboard_bbh_ruin_names\": {\n \
\ \"alias\": \" - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\"\
: 0.208,\n \"acc_norm_stderr,none\": 0.02572139890141637\n },\n\
\ \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.112,\n \"acc_norm_stderr,none\": 0.019985536939171485\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" -\
\ leaderboard_bbh_snarks\",\n \"acc_norm,none\": 0.4606741573033708,\n\
\ \"acc_norm_stderr,none\": 0.03746587736387869\n },\n \
\ \"leaderboard_bbh_sports_understanding\": {\n \"alias\": \" - leaderboard_bbh_sports_understanding\"\
,\n \"acc_norm,none\": 0.524,\n \"acc_norm_stderr,none\":\
\ 0.03164968895968774\n },\n \"leaderboard_bbh_temporal_sequences\"\
: {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\",\n \
\ \"acc_norm,none\": 0.22,\n \"acc_norm_stderr,none\": 0.026251792824605793\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.172,\n \"acc_norm_stderr,none\":\
\ 0.02391551394448624\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.156,\n \"acc_norm_stderr,none\":\
\ 0.022995023034068682\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.332,\n \"acc_norm_stderr,none\":\
\ 0.029844039047465857\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2986577181208054,\n\
\ \"acc_norm_stderr,none\": 0.013268550684084248,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.2777777777777778,\n \"acc_norm_stderr,none\": 0.03191178226713548\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.30952380952380953,\n\
\ \"acc_norm_stderr,none\": 0.01980264188017022\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.29464285714285715,\n \"acc_norm_stderr,none\"\
: 0.021562481080109767\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.24953789279112754,\n \"prompt_level_strict_acc_stderr,none\": 0.018622404509805804,\n\
\ \"inst_level_strict_acc,none\": 0.37410071942446044,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.2902033271719039,\n \"prompt_level_loose_acc_stderr,none\": 0.019530856691222623,\n\
\ \"inst_level_loose_acc,none\": 0.4136690647482014,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.014350453172205438,\n \"exact_match_stderr,none\"\
: 0.0032674983588001287,\n \"alias\": \" - leaderboard_math_hard\"\n\
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.019543973941368076,\n\
\ \"exact_match_stderr,none\": 0.007913339243755165\n },\n \
\ \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\": \"\
\ - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_geometry_hard\"\
: {\n \"alias\": \" - leaderboard_math_geometry_hard\",\n \
\ \"exact_match,none\": 0.022727272727272728,\n \"exact_match_stderr,none\"\
: 0.0130210469090637\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n\
\ \"exact_match,none\": 0.0035714285714285713,\n \"exact_match_stderr,none\"\
: 0.0035714285714285713\n },\n \"leaderboard_math_num_theory_hard\"\
: {\n \"alias\": \" - leaderboard_math_num_theory_hard\",\n \
\ \"exact_match,none\": 0.025974025974025976,\n \"exact_match_stderr,none\"\
: 0.012859058999697068\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_prealgebra_hard\",\n \
\ \"exact_match,none\": 0.02072538860103627,\n \"exact_match_stderr,none\"\
: 0.01028141701190903\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.007407407407407408,\n \"exact_match_stderr,none\"\
: 0.007407407407407408\n },\n \"leaderboard_mmlu_pro\": {\n \
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.14261968085106383,\n\
\ \"acc_stderr,none\": 0.003188053654084568\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.39814814814814814,\n \"acc_norm_stderr,none\"\
: 0.017324563123870807,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"alias\":\
\ \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\": 0.52,\n\
\ \"acc_norm_stderr,none\": 0.03166085340849512\n },\n \
\ \"leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.2421875,\n \"acc_norm_stderr,none\"\
: 0.026827898476066977\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \
\ \"acc_norm,none\": 0.436,\n \"acc_norm_stderr,none\": 0.031425567060281365\n\
\ }\n },\n \"leaderboard\": {\n \"inst_level_loose_acc,none\"\
: 0.4136690647482014,\n \"inst_level_loose_acc_stderr,none\": \"N/A\",\n\
\ \"acc,none\": 0.14261968085106383,\n \"acc_stderr,none\": 0.003188053654084568,\n\
\ \"exact_match,none\": 0.014350453172205438,\n \"exact_match_stderr,none\"\
: 0.0032674983588001287,\n \"prompt_level_strict_acc,none\": 0.24953789279112754,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.018622404509805804,\n \
\ \"prompt_level_loose_acc,none\": 0.2902033271719039,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.019530856691222623,\n \"inst_level_strict_acc,none\": 0.37410071942446044,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"acc_norm,none\"\
: 0.3401219354001816,\n \"acc_norm_stderr,none\": 0.0051217073191174856,\n\
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \
\ \"acc_norm,none\": 0.34108661690678704,\n \"acc_norm_stderr,none\": 0.005853610410917754,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \"\
acc_norm,none\": 0.688,\n \"acc_norm_stderr,none\": 0.029361067575219852\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.5187165775401069,\n \"acc_norm_stderr,none\"\
: 0.03663608375537843\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.308,\n \"acc_norm_stderr,none\": 0.02925692860650181\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.564,\n \"acc_norm_stderr,none\": 0.03142556706028136\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.536,\n \"acc_norm_stderr,none\": 0.031603975145223735\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.156,\n \"acc_norm_stderr,none\": 0.022995023034068682\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.544,\n \"acc_norm_stderr,none\": 0.031563285061213475\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.196,\n \"acc_norm_stderr,none\": 0.025156857313255926\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.128,\n \"acc_norm_stderr,none\": 0.021172081336336534\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.348,\n \"acc_norm_stderr,none\": 0.030186568464511673\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.464,\n \"acc_norm_stderr,none\": 0.03160397514522374\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.42,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.244,\n \"acc_norm_stderr,none\": 0.02721799546455311\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.2808219178082192,\n\
\ \"acc_norm_stderr,none\": 0.037320694849458984\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.172,\n \"acc_norm_stderr,none\": 0.02391551394448624\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.208,\n \"acc_norm_stderr,none\": 0.02572139890141637\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.112,\n \"acc_norm_stderr,none\": 0.019985536939171485\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.4606741573033708,\n \"acc_norm_stderr,none\"\
: 0.03746587736387869\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.524,\n \"acc_norm_stderr,none\": 0.03164968895968774\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.22,\n \"acc_norm_stderr,none\": 0.026251792824605793\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.172,\n \"acc_norm_stderr,none\": 0.02391551394448624\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.156,\n \"acc_norm_stderr,none\": 0.022995023034068682\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.332,\n \"acc_norm_stderr,none\": 0.029844039047465857\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2986577181208054,\n\
\ \"acc_norm_stderr,none\": 0.013268550684084248,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.2777777777777778,\n\
\ \"acc_norm_stderr,none\": 0.03191178226713548\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.30952380952380953,\n \"acc_norm_stderr,none\": 0.01980264188017022\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.29464285714285715,\n \"acc_norm_stderr,none\"\
: 0.021562481080109767\n },\n \"leaderboard_ifeval\": {\n \"alias\"\
: \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.24953789279112754,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.018622404509805804,\n \
\ \"inst_level_strict_acc,none\": 0.37410071942446044,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.2902033271719039,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.019530856691222623,\n \"inst_level_loose_acc,none\"\
: 0.4136690647482014,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n \
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.014350453172205438,\n\
\ \"exact_match_stderr,none\": 0.0032674983588001287,\n \"alias\"\
: \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\": {\n\
\ \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.019543973941368076,\n \"exact_match_stderr,none\": 0.007913339243755165\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_geometry_hard\"\
: {\n \"alias\": \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\"\
: 0.022727272727272728,\n \"exact_match_stderr,none\": 0.0130210469090637\n\
\ },\n \"leaderboard_math_intermediate_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_intermediate_algebra_hard\",\n \"exact_match,none\"\
: 0.0035714285714285713,\n \"exact_match_stderr,none\": 0.0035714285714285713\n\
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\": \" - leaderboard_math_num_theory_hard\"\
,\n \"exact_match,none\": 0.025974025974025976,\n \"exact_match_stderr,none\"\
: 0.012859058999697068\n },\n \"leaderboard_math_prealgebra_hard\": {\n \
\ \"alias\": \" - leaderboard_math_prealgebra_hard\",\n \"exact_match,none\"\
: 0.02072538860103627,\n \"exact_match_stderr,none\": 0.01028141701190903\n\
\ },\n \"leaderboard_math_precalculus_hard\": {\n \"alias\": \" -\
\ leaderboard_math_precalculus_hard\",\n \"exact_match,none\": 0.007407407407407408,\n\
\ \"exact_match_stderr,none\": 0.007407407407407408\n },\n \"leaderboard_mmlu_pro\"\
: {\n \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.14261968085106383,\n\
\ \"acc_stderr,none\": 0.003188053654084568\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.39814814814814814,\n \"acc_norm_stderr,none\"\
: 0.017324563123870807,\n \"alias\": \" - leaderboard_musr\"\n },\n \
\ \"leaderboard_musr_murder_mysteries\": {\n \"alias\": \" - leaderboard_musr_murder_mysteries\"\
,\n \"acc_norm,none\": 0.52,\n \"acc_norm_stderr,none\": 0.03166085340849512\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\": \" -\
\ leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.2421875,\n\
\ \"acc_norm_stderr,none\": 0.026827898476066977\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \"acc_norm,none\"\
: 0.436,\n \"acc_norm_stderr,none\": 0.031425567060281365\n }\n}\n```"
repo_url: https://huggingface.co/LeroyDyer/SpydazWebAI_Human_AGI_001
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_bbh_navigate
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_bbh_snarks
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_gpqa_extended
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_gpqa_main
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_gpqa_main_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_ifeval
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_ifeval_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_mmlu_pro
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_musr_object_placements
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-19T23-37-52.829059.jsonl'
- config_name: LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_19T23_37_52.829059
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-19T23-37-52.829059.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-19T23-37-52.829059.jsonl'
---
# Dataset Card for Evaluation run of LeroyDyer/SpydazWebAI_Human_AGI_001
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [LeroyDyer/SpydazWebAI_Human_AGI_001](https://huggingface.co/LeroyDyer/SpydazWebAI_Human_AGI_001)
The dataset is composed of 38 configuration(s), each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/LeroyDyer__SpydazWebAI_Human_AGI_001-details",
name="LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
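The same pattern works for any of the per-task configurations listed in the `configs` section above. As a minimal sketch (assuming only the standard `datasets` API; the exact per-sample fields depend on the harness version that produced the run), you can load the IFEval samples and inspect the available columns with pandas:
```python
from datasets import load_dataset

# Load the per-sample IFEval results for the latest run.
# The config name is taken from the configs list above.
ifeval = load_dataset(
    "open-llm-leaderboard/LeroyDyer__SpydazWebAI_Human_AGI_001-details",
    name="LeroyDyer__SpydazWebAI_Human_AGI_001__leaderboard_ifeval",
    split="latest",
)

# Convert to pandas to see which per-sample fields are available;
# the columns vary with the evaluation harness version.
df = ifeval.to_pandas()
print(df.columns.tolist())
print(len(df), "samples")
```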
## Latest results
These are the [latest results from run 2024-11-19T23-37-52.829059](https://huggingface.co/datasets/open-llm-leaderboard/LeroyDyer__SpydazWebAI_Human_AGI_001-details/blob/main/LeroyDyer__SpydazWebAI_Human_AGI_001/results_2024-11-19T23-37-52.829059.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and in the "latest" split for each eval):
```python
{
"all": {
"leaderboard": {
"inst_level_loose_acc,none": 0.4136690647482014,
"inst_level_loose_acc_stderr,none": "N/A",
"acc,none": 0.14261968085106383,
"acc_stderr,none": 0.003188053654084568,
"exact_match,none": 0.014350453172205438,
"exact_match_stderr,none": 0.0032674983588001287,
"prompt_level_strict_acc,none": 0.24953789279112754,
"prompt_level_strict_acc_stderr,none": 0.018622404509805804,
"prompt_level_loose_acc,none": 0.2902033271719039,
"prompt_level_loose_acc_stderr,none": 0.019530856691222623,
"inst_level_strict_acc,none": 0.37410071942446044,
"inst_level_strict_acc_stderr,none": "N/A",
"acc_norm,none": 0.3401219354001816,
"acc_norm_stderr,none": 0.0051217073191174856,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.34108661690678704,
"acc_norm_stderr,none": 0.005853610410917754,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.688,
"acc_norm_stderr,none": 0.029361067575219852
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5187165775401069,
"acc_norm_stderr,none": 0.03663608375537843
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.308,
"acc_norm_stderr,none": 0.02925692860650181
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.564,
"acc_norm_stderr,none": 0.03142556706028136
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.536,
"acc_norm_stderr,none": 0.031603975145223735
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.156,
"acc_norm_stderr,none": 0.022995023034068682
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.196,
"acc_norm_stderr,none": 0.025156857313255926
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.128,
"acc_norm_stderr,none": 0.021172081336336534
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.348,
"acc_norm_stderr,none": 0.030186568464511673
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.464,
"acc_norm_stderr,none": 0.03160397514522374
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.244,
"acc_norm_stderr,none": 0.02721799546455311
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.2808219178082192,
"acc_norm_stderr,none": 0.037320694849458984
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.172,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.208,
"acc_norm_stderr,none": 0.02572139890141637
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.112,
"acc_norm_stderr,none": 0.019985536939171485
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.4606741573033708,
"acc_norm_stderr,none": 0.03746587736387869
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.524,
"acc_norm_stderr,none": 0.03164968895968774
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.22,
"acc_norm_stderr,none": 0.026251792824605793
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.172,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.156,
"acc_norm_stderr,none": 0.022995023034068682
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.332,
"acc_norm_stderr,none": 0.029844039047465857
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2986577181208054,
"acc_norm_stderr,none": 0.013268550684084248,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2777777777777778,
"acc_norm_stderr,none": 0.03191178226713548
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.30952380952380953,
"acc_norm_stderr,none": 0.01980264188017022
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.29464285714285715,
"acc_norm_stderr,none": 0.021562481080109767
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.24953789279112754,
"prompt_level_strict_acc_stderr,none": 0.018622404509805804,
"inst_level_strict_acc,none": 0.37410071942446044,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.2902033271719039,
"prompt_level_loose_acc_stderr,none": 0.019530856691222623,
"inst_level_loose_acc,none": 0.4136690647482014,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.014350453172205438,
"exact_match_stderr,none": 0.0032674983588001287,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.019543973941368076,
"exact_match_stderr,none": 0.007913339243755165
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.022727272727272728,
"exact_match_stderr,none": 0.0130210469090637
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0035714285714285713,
"exact_match_stderr,none": 0.0035714285714285713
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.025974025974025976,
"exact_match_stderr,none": 0.012859058999697068
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.02072538860103627,
"exact_match_stderr,none": 0.01028141701190903
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.007407407407407408,
"exact_match_stderr,none": 0.007407407407407408
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.14261968085106383,
"acc_stderr,none": 0.003188053654084568
},
"leaderboard_musr": {
"acc_norm,none": 0.39814814814814814,
"acc_norm_stderr,none": 0.017324563123870807,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.52,
"acc_norm_stderr,none": 0.03166085340849512
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.2421875,
"acc_norm_stderr,none": 0.026827898476066977
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.436,
"acc_norm_stderr,none": 0.031425567060281365
}
},
"leaderboard": {
"inst_level_loose_acc,none": 0.4136690647482014,
"inst_level_loose_acc_stderr,none": "N/A",
"acc,none": 0.14261968085106383,
"acc_stderr,none": 0.003188053654084568,
"exact_match,none": 0.014350453172205438,
"exact_match_stderr,none": 0.0032674983588001287,
"prompt_level_strict_acc,none": 0.24953789279112754,
"prompt_level_strict_acc_stderr,none": 0.018622404509805804,
"prompt_level_loose_acc,none": 0.2902033271719039,
"prompt_level_loose_acc_stderr,none": 0.019530856691222623,
"inst_level_strict_acc,none": 0.37410071942446044,
"inst_level_strict_acc_stderr,none": "N/A",
"acc_norm,none": 0.3401219354001816,
"acc_norm_stderr,none": 0.0051217073191174856,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.34108661690678704,
"acc_norm_stderr,none": 0.005853610410917754,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.688,
"acc_norm_stderr,none": 0.029361067575219852
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5187165775401069,
"acc_norm_stderr,none": 0.03663608375537843
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.308,
"acc_norm_stderr,none": 0.02925692860650181
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.564,
"acc_norm_stderr,none": 0.03142556706028136
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.536,
"acc_norm_stderr,none": 0.031603975145223735
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.156,
"acc_norm_stderr,none": 0.022995023034068682
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.196,
"acc_norm_stderr,none": 0.025156857313255926
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.128,
"acc_norm_stderr,none": 0.021172081336336534
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.348,
"acc_norm_stderr,none": 0.030186568464511673
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.464,
"acc_norm_stderr,none": 0.03160397514522374
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.244,
"acc_norm_stderr,none": 0.02721799546455311
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.2808219178082192,
"acc_norm_stderr,none": 0.037320694849458984
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.172,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.208,
"acc_norm_stderr,none": 0.02572139890141637
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.112,
"acc_norm_stderr,none": 0.019985536939171485
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.4606741573033708,
"acc_norm_stderr,none": 0.03746587736387869
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.524,
"acc_norm_stderr,none": 0.03164968895968774
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.22,
"acc_norm_stderr,none": 0.026251792824605793
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.172,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.156,
"acc_norm_stderr,none": 0.022995023034068682
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.332,
"acc_norm_stderr,none": 0.029844039047465857
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2986577181208054,
"acc_norm_stderr,none": 0.013268550684084248,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2777777777777778,
"acc_norm_stderr,none": 0.03191178226713548
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.30952380952380953,
"acc_norm_stderr,none": 0.01980264188017022
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.29464285714285715,
"acc_norm_stderr,none": 0.021562481080109767
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.24953789279112754,
"prompt_level_strict_acc_stderr,none": 0.018622404509805804,
"inst_level_strict_acc,none": 0.37410071942446044,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.2902033271719039,
"prompt_level_loose_acc_stderr,none": 0.019530856691222623,
"inst_level_loose_acc,none": 0.4136690647482014,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.014350453172205438,
"exact_match_stderr,none": 0.0032674983588001287,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.019543973941368076,
"exact_match_stderr,none": 0.007913339243755165
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.022727272727272728,
"exact_match_stderr,none": 0.0130210469090637
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0035714285714285713,
"exact_match_stderr,none": 0.0035714285714285713
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.025974025974025976,
"exact_match_stderr,none": 0.012859058999697068
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.02072538860103627,
"exact_match_stderr,none": 0.01028141701190903
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.007407407407407408,
"exact_match_stderr,none": 0.007407407407407408
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.14261968085106383,
"acc_stderr,none": 0.003188053654084568
},
"leaderboard_musr": {
"acc_norm,none": 0.39814814814814814,
"acc_norm_stderr,none": 0.017324563123870807,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.52,
"acc_norm_stderr,none": 0.03166085340849512
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.2421875,
"acc_norm_stderr,none": 0.026827898476066977
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.436,
"acc_norm_stderr,none": 0.031425567060281365
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7-details | open-llm-leaderboard | "2024-11-19T23:46:06Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T23:43:10Z" | ---
pretty_name: Evaluation run of jeffmeloy/Qwen2.5-7B-nerd-uncensored-v1.7
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [jeffmeloy/Qwen2.5-7B-nerd-uncensored-v1.7](https://huggingface.co/jeffmeloy/Qwen2.5-7B-nerd-uncensored-v1.7)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7-details\"\
,\n\tname=\"jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-11-19T23-43-09.288044](https://huggingface.co/datasets/open-llm-leaderboard/jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7-details/blob/main/jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7/results_2024-11-19T23-43-09.288044.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"exact_match,none\": 0.26963746223564955,\n \"exact_match_stderr,none\"\
: 0.011184954631980942,\n \"acc_norm,none\": 0.4983785186146063,\n \
\ \"acc_norm_stderr,none\": 0.005429049922901389,\n \"acc,none\"\
: 0.42802526595744683,\n \"acc_stderr,none\": 0.00451099413054819,\n\
\ \"prompt_level_loose_acc,none\": 0.3844731977818854,\n \"\
prompt_level_loose_acc_stderr,none\": 0.020934357634584764,\n \"prompt_level_strict_acc,none\"\
: 0.34750462107208874,\n \"prompt_level_strict_acc_stderr,none\": 0.020491423653415698,\n\
\ \"inst_level_loose_acc,none\": 0.5275779376498801,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\",\n \"inst_level_strict_acc,none\"\
: 0.4928057553956834,\n \"inst_level_strict_acc_stderr,none\": \"N/A\"\
,\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.5363652143725047,\n \"acc_norm_stderr,none\"\
: 0.006270943583167176,\n \"alias\": \" - leaderboard_bbh\"\n \
\ },\n \"leaderboard_bbh_boolean_expressions\": {\n \"alias\"\
: \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\": 0.768,\n\
\ \"acc_norm_stderr,none\": 0.026750070374865202\n },\n \
\ \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.6096256684491979,\n \"acc_norm_stderr,none\"\
: 0.03576973947986408\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_date_understanding\",\n \
\ \"acc_norm,none\": 0.576,\n \"acc_norm_stderr,none\": 0.03131803437491622\n\
\ },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\": 0.656,\n\
\ \"acc_norm_stderr,none\": 0.03010450339231644\n },\n \
\ \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.588,\n \"acc_norm_stderr,none\":\
\ 0.031191596026022818\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\",\n \
\ \"acc_norm,none\": 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \"\
\ - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\": 0.56,\n \
\ \"acc_norm_stderr,none\": 0.03145724452223569\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
,\n \"acc_norm,none\": 0.488,\n \"acc_norm_stderr,none\":\
\ 0.03167708558254714\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.476,\n \"acc_norm_stderr,none\":\
\ 0.03164968895968774\n },\n \"leaderboard_bbh_logical_deduction_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\
,\n \"acc_norm,none\": 0.732,\n \"acc_norm_stderr,none\":\
\ 0.02806876238252672\n },\n \"leaderboard_bbh_movie_recommendation\"\
: {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\",\n \
\ \"acc_norm,none\": 0.664,\n \"acc_norm_stderr,none\": 0.029933259094191533\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \"\
\ - leaderboard_bbh_navigate\",\n \"acc_norm,none\": 0.644,\n \
\ \"acc_norm_stderr,none\": 0.0303436806571532\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"alias\": \" - leaderboard_bbh_object_counting\",\n \
\ \"acc_norm,none\": 0.432,\n \"acc_norm_stderr,none\": 0.03139181076542942\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
alias\": \" - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\"\
: 0.5273972602739726,\n \"acc_norm_stderr,none\": 0.04146035831053314\n\
\ },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\",\n\
\ \"acc_norm,none\": 0.588,\n \"acc_norm_stderr,none\": 0.031191596026022818\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\": 0.616,\n \
\ \"acc_norm_stderr,none\": 0.030821679117375447\n },\n \"\
leaderboard_bbh_salient_translation_error_detection\": {\n \"alias\"\
: \" - leaderboard_bbh_salient_translation_error_detection\",\n \"acc_norm,none\"\
: 0.504,\n \"acc_norm_stderr,none\": 0.0316851985511992\n },\n\
\ \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.7191011235955056,\n \"acc_norm_stderr,none\"\
: 0.03378184395518332\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \
\ \"acc_norm,none\": 0.684,\n \"acc_norm_stderr,none\": 0.02946265759857865\n\
\ },\n \"leaderboard_bbh_temporal_sequences\": {\n \"alias\"\
: \" - leaderboard_bbh_temporal_sequences\",\n \"acc_norm,none\": 0.448,\n\
\ \"acc_norm_stderr,none\": 0.03151438761115349\n },\n \
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \"\
alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\",\n \
\ \"acc_norm,none\": 0.204,\n \"acc_norm_stderr,none\": 0.025537121574548162\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.164,\n \"acc_norm_stderr,none\":\
\ 0.02346526100207671\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.3,\n \"acc_norm_stderr,none\": 0.029040893477575783\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\":\
\ \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\": 0.504,\n\
\ \"acc_norm_stderr,none\": 0.0316851985511992\n },\n \"\
leaderboard_gpqa\": {\n \"acc_norm,none\": 0.3238255033557047,\n \
\ \"acc_norm_stderr,none\": 0.013561266337323075,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.35858585858585856,\n \"acc_norm_stderr,none\": 0.034169036403915276\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.30952380952380953,\n\
\ \"acc_norm_stderr,none\": 0.01980264188017022\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.32589285714285715,\n \"acc_norm_stderr,none\"\
: 0.02216910313464343\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.34750462107208874,\n \"prompt_level_strict_acc_stderr,none\": 0.020491423653415698,\n\
\ \"inst_level_strict_acc,none\": 0.49280575539568344,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.3844731977818854,\n \"prompt_level_loose_acc_stderr,none\": 0.02093435763458476,\n\
\ \"inst_level_loose_acc,none\": 0.5275779376498801,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.26963746223564955,\n \"exact_match_stderr,none\"\
: 0.011184954631980942,\n \"alias\": \" - leaderboard_math_hard\"\n \
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.5179153094462541,\n\
\ \"exact_match_stderr,none\": 0.02856474366553855\n },\n \
\ \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\": \" \
\ - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.17073170731707318,\n \"exact_match_stderr,none\": 0.034066279591320504\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\"\
: \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\": 0.14393939393939395,\n\
\ \"exact_match_stderr,none\": 0.030669482160081984\n },\n \
\ \"leaderboard_math_intermediate_algebra_hard\": {\n \"alias\":\
\ \" - leaderboard_math_intermediate_algebra_hard\",\n \"exact_match,none\"\
: 0.09642857142857143,\n \"exact_match_stderr,none\": 0.017671849720607317\n\
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\"\
: \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\": 0.18181818181818182,\n\
\ \"exact_match_stderr,none\": 0.031181560935001604\n },\n \
\ \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.46113989637305697,\n \"exact_match_stderr,none\"\
: 0.03597524411734576\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.1037037037037037,\n \"exact_match_stderr,none\"\
: 0.02633725661744443\n },\n \"leaderboard_mmlu_pro\": {\n \
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.42802526595744683,\n\
\ \"acc_stderr,none\": 0.00451099413054819\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.48412698412698413,\n \"acc_norm_stderr,none\"\
: 0.018000464125634077,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"alias\":\
\ \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\": 0.58,\n\
\ \"acc_norm_stderr,none\": 0.03127799950463661\n },\n \
\ \"leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.39453125,\n \"acc_norm_stderr,none\"\
: 0.030606698150250366\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \
\ \"acc_norm,none\": 0.48,\n \"acc_norm_stderr,none\": 0.03166085340849512\n\
\ }\n },\n \"leaderboard\": {\n \"exact_match,none\": 0.26963746223564955,\n\
\ \"exact_match_stderr,none\": 0.011184954631980942,\n \"acc_norm,none\"\
: 0.4983785186146063,\n \"acc_norm_stderr,none\": 0.005429049922901389,\n\
\ \"acc,none\": 0.42802526595744683,\n \"acc_stderr,none\": 0.00451099413054819,\n\
\ \"prompt_level_loose_acc,none\": 0.3844731977818854,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.020934357634584764,\n \"prompt_level_strict_acc,none\": 0.34750462107208874,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.020491423653415698,\n \
\ \"inst_level_loose_acc,none\": 0.5275779376498801,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"inst_level_strict_acc,none\": 0.4928057553956834,\n \
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"alias\": \"leaderboard\"\
\n },\n \"leaderboard_bbh\": {\n \"acc_norm,none\": 0.5363652143725047,\n\
\ \"acc_norm_stderr,none\": 0.006270943583167176,\n \"alias\": \"\
\ - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\": {\n\
\ \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\"\
: 0.768,\n \"acc_norm_stderr,none\": 0.026750070374865202\n },\n \"\
leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.6096256684491979,\n \"acc_norm_stderr,none\"\
: 0.03576973947986408\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.576,\n \"acc_norm_stderr,none\": 0.03131803437491622\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.656,\n \"acc_norm_stderr,none\": 0.03010450339231644\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.588,\n \"acc_norm_stderr,none\": 0.031191596026022818\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.56,\n \"acc_norm_stderr,none\": 0.03145724452223569\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.476,\n \"acc_norm_stderr,none\": 0.03164968895968774\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.732,\n \"acc_norm_stderr,none\": 0.02806876238252672\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.664,\n \"acc_norm_stderr,none\": 0.029933259094191533\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.644,\n \"acc_norm_stderr,none\": 0.0303436806571532\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.432,\n \"acc_norm_stderr,none\": 0.03139181076542942\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.5273972602739726,\n\
\ \"acc_norm_stderr,none\": 0.04146035831053314\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.588,\n \"acc_norm_stderr,none\": 0.031191596026022818\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.616,\n \"acc_norm_stderr,none\": 0.030821679117375447\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.504,\n \"acc_norm_stderr,none\": 0.0316851985511992\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.7191011235955056,\n \"acc_norm_stderr,none\"\
: 0.03378184395518332\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.684,\n \"acc_norm_stderr,none\": 0.02946265759857865\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.448,\n \"acc_norm_stderr,none\": 0.03151438761115349\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.204,\n \"acc_norm_stderr,none\": 0.025537121574548162\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.164,\n \"acc_norm_stderr,none\": 0.02346526100207671\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.3,\n \"acc_norm_stderr,none\": 0.029040893477575783\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.504,\n \"acc_norm_stderr,none\": 0.0316851985511992\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.3238255033557047,\n\
\ \"acc_norm_stderr,none\": 0.013561266337323075,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.35858585858585856,\n\
\ \"acc_norm_stderr,none\": 0.034169036403915276\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.30952380952380953,\n \"acc_norm_stderr,none\": 0.01980264188017022\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.32589285714285715,\n \"acc_norm_stderr,none\"\
: 0.02216910313464343\n },\n \"leaderboard_ifeval\": {\n \"alias\"\
: \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.34750462107208874,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.020491423653415698,\n \
\ \"inst_level_strict_acc,none\": 0.49280575539568344,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.3844731977818854,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.02093435763458476,\n \"inst_level_loose_acc,none\"\
: 0.5275779376498801,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n \
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.26963746223564955,\n\
\ \"exact_match_stderr,none\": 0.011184954631980942,\n \"alias\":\
\ \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\": {\n\
\ \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.5179153094462541,\n \"exact_match_stderr,none\": 0.02856474366553855\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.17073170731707318,\n \"exact_match_stderr,none\": 0.034066279591320504\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\": \" - leaderboard_math_geometry_hard\"\
,\n \"exact_match,none\": 0.14393939393939395,\n \"exact_match_stderr,none\"\
: 0.030669482160081984\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.09642857142857143,\n \"exact_match_stderr,none\"\
: 0.017671849720607317\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.18181818181818182,\n \"exact_match_stderr,none\": 0.031181560935001604\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.46113989637305697,\n \"exact_match_stderr,none\"\
: 0.03597524411734576\n },\n \"leaderboard_math_precalculus_hard\": {\n \
\ \"alias\": \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\"\
: 0.1037037037037037,\n \"exact_match_stderr,none\": 0.02633725661744443\n\
\ },\n \"leaderboard_mmlu_pro\": {\n \"alias\": \" - leaderboard_mmlu_pro\"\
,\n \"acc,none\": 0.42802526595744683,\n \"acc_stderr,none\": 0.00451099413054819\n\
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.48412698412698413,\n\
\ \"acc_norm_stderr,none\": 0.018000464125634077,\n \"alias\": \"\
\ - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\": {\n \
\ \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\"\
: 0.58,\n \"acc_norm_stderr,none\": 0.03127799950463661\n },\n \"leaderboard_musr_object_placements\"\
: {\n \"alias\": \" - leaderboard_musr_object_placements\",\n \"\
acc_norm,none\": 0.39453125,\n \"acc_norm_stderr,none\": 0.030606698150250366\n\
\ },\n \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.48,\n \"acc_norm_stderr,none\": 0.03166085340849512\n\
\ }\n}\n```"
repo_url: https://huggingface.co/jeffmeloy/Qwen2.5-7B-nerd-uncensored-v1.7
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_bbh_navigate
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_bbh_snarks
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_gpqa_extended
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_gpqa_main
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_gpqa_main_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_ifeval
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_ifeval_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_mmlu_pro
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_musr_object_placements
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-19T23-43-09.288044.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_19T23_43_09.288044
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-19T23-43-09.288044.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-19T23-43-09.288044.jsonl'
---
# Dataset Card for Evaluation run of jeffmeloy/Qwen2.5-7B-nerd-uncensored-v1.7
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [jeffmeloy/Qwen2.5-7B-nerd-uncensored-v1.7](https://huggingface.co/jeffmeloy/Qwen2.5-7B-nerd-uncensored-v1.7)
The dataset is composed of 38 configuration(s), each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7-details",
name="jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
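Any of the other per-task configurations listed in the header above can be loaded the same way; only the `name` argument changes. As a minimal sketch, here is the same call pointed at the GPQA diamond configuration that appears in the config list:
```python
from datasets import load_dataset

# Same pattern as above, using another configuration of this repository.
gpqa_diamond = load_dataset(
    "open-llm-leaderboard/jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7-details",
    name="jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7__leaderboard_gpqa_diamond",
    split="latest",
)
print(gpqa_diamond)  # per-sample records for the GPQA diamond eval
```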
## Latest results
These are the [latest results from run 2024-11-19T23-43-09.288044](https://huggingface.co/datasets/open-llm-leaderboard/jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7-details/blob/main/jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.7/results_2024-11-19T23-43-09.288044.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and in the "latest" split of its eval):
```python
{
"all": {
"leaderboard": {
"exact_match,none": 0.26963746223564955,
"exact_match_stderr,none": 0.011184954631980942,
"acc_norm,none": 0.4983785186146063,
"acc_norm_stderr,none": 0.005429049922901389,
"acc,none": 0.42802526595744683,
"acc_stderr,none": 0.00451099413054819,
"prompt_level_loose_acc,none": 0.3844731977818854,
"prompt_level_loose_acc_stderr,none": 0.020934357634584764,
"prompt_level_strict_acc,none": 0.34750462107208874,
"prompt_level_strict_acc_stderr,none": 0.020491423653415698,
"inst_level_loose_acc,none": 0.5275779376498801,
"inst_level_loose_acc_stderr,none": "N/A",
"inst_level_strict_acc,none": 0.4928057553956834,
"inst_level_strict_acc_stderr,none": "N/A",
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.5363652143725047,
"acc_norm_stderr,none": 0.006270943583167176,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.768,
"acc_norm_stderr,none": 0.026750070374865202
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.6096256684491979,
"acc_norm_stderr,none": 0.03576973947986408
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.576,
"acc_norm_stderr,none": 0.03131803437491622
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.656,
"acc_norm_stderr,none": 0.03010450339231644
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.588,
"acc_norm_stderr,none": 0.031191596026022818
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.56,
"acc_norm_stderr,none": 0.03145724452223569
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.476,
"acc_norm_stderr,none": 0.03164968895968774
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.732,
"acc_norm_stderr,none": 0.02806876238252672
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.664,
"acc_norm_stderr,none": 0.029933259094191533
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.644,
"acc_norm_stderr,none": 0.0303436806571532
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.432,
"acc_norm_stderr,none": 0.03139181076542942
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.5273972602739726,
"acc_norm_stderr,none": 0.04146035831053314
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.588,
"acc_norm_stderr,none": 0.031191596026022818
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.616,
"acc_norm_stderr,none": 0.030821679117375447
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.504,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.7191011235955056,
"acc_norm_stderr,none": 0.03378184395518332
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.684,
"acc_norm_stderr,none": 0.02946265759857865
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.448,
"acc_norm_stderr,none": 0.03151438761115349
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.204,
"acc_norm_stderr,none": 0.025537121574548162
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.164,
"acc_norm_stderr,none": 0.02346526100207671
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.3,
"acc_norm_stderr,none": 0.029040893477575783
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.504,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_gpqa": {
"acc_norm,none": 0.3238255033557047,
"acc_norm_stderr,none": 0.013561266337323075,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.35858585858585856,
"acc_norm_stderr,none": 0.034169036403915276
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.30952380952380953,
"acc_norm_stderr,none": 0.01980264188017022
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.32589285714285715,
"acc_norm_stderr,none": 0.02216910313464343
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.34750462107208874,
"prompt_level_strict_acc_stderr,none": 0.020491423653415698,
"inst_level_strict_acc,none": 0.49280575539568344,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.3844731977818854,
"prompt_level_loose_acc_stderr,none": 0.02093435763458476,
"inst_level_loose_acc,none": 0.5275779376498801,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.26963746223564955,
"exact_match_stderr,none": 0.011184954631980942,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.5179153094462541,
"exact_match_stderr,none": 0.02856474366553855
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.17073170731707318,
"exact_match_stderr,none": 0.034066279591320504
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.14393939393939395,
"exact_match_stderr,none": 0.030669482160081984
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.09642857142857143,
"exact_match_stderr,none": 0.017671849720607317
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.18181818181818182,
"exact_match_stderr,none": 0.031181560935001604
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.46113989637305697,
"exact_match_stderr,none": 0.03597524411734576
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.1037037037037037,
"exact_match_stderr,none": 0.02633725661744443
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.42802526595744683,
"acc_stderr,none": 0.00451099413054819
},
"leaderboard_musr": {
"acc_norm,none": 0.48412698412698413,
"acc_norm_stderr,none": 0.018000464125634077,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.58,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.39453125,
"acc_norm_stderr,none": 0.030606698150250366
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.48,
"acc_norm_stderr,none": 0.03166085340849512
}
},
"leaderboard": {
"exact_match,none": 0.26963746223564955,
"exact_match_stderr,none": 0.011184954631980942,
"acc_norm,none": 0.4983785186146063,
"acc_norm_stderr,none": 0.005429049922901389,
"acc,none": 0.42802526595744683,
"acc_stderr,none": 0.00451099413054819,
"prompt_level_loose_acc,none": 0.3844731977818854,
"prompt_level_loose_acc_stderr,none": 0.020934357634584764,
"prompt_level_strict_acc,none": 0.34750462107208874,
"prompt_level_strict_acc_stderr,none": 0.020491423653415698,
"inst_level_loose_acc,none": 0.5275779376498801,
"inst_level_loose_acc_stderr,none": "N/A",
"inst_level_strict_acc,none": 0.4928057553956834,
"inst_level_strict_acc_stderr,none": "N/A",
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.5363652143725047,
"acc_norm_stderr,none": 0.006270943583167176,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.768,
"acc_norm_stderr,none": 0.026750070374865202
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.6096256684491979,
"acc_norm_stderr,none": 0.03576973947986408
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.576,
"acc_norm_stderr,none": 0.03131803437491622
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.656,
"acc_norm_stderr,none": 0.03010450339231644
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.588,
"acc_norm_stderr,none": 0.031191596026022818
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.56,
"acc_norm_stderr,none": 0.03145724452223569
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.476,
"acc_norm_stderr,none": 0.03164968895968774
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.732,
"acc_norm_stderr,none": 0.02806876238252672
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.664,
"acc_norm_stderr,none": 0.029933259094191533
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.644,
"acc_norm_stderr,none": 0.0303436806571532
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.432,
"acc_norm_stderr,none": 0.03139181076542942
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.5273972602739726,
"acc_norm_stderr,none": 0.04146035831053314
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.588,
"acc_norm_stderr,none": 0.031191596026022818
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.616,
"acc_norm_stderr,none": 0.030821679117375447
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.504,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.7191011235955056,
"acc_norm_stderr,none": 0.03378184395518332
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.684,
"acc_norm_stderr,none": 0.02946265759857865
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.448,
"acc_norm_stderr,none": 0.03151438761115349
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.204,
"acc_norm_stderr,none": 0.025537121574548162
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.164,
"acc_norm_stderr,none": 0.02346526100207671
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.3,
"acc_norm_stderr,none": 0.029040893477575783
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.504,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_gpqa": {
"acc_norm,none": 0.3238255033557047,
"acc_norm_stderr,none": 0.013561266337323075,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.35858585858585856,
"acc_norm_stderr,none": 0.034169036403915276
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.30952380952380953,
"acc_norm_stderr,none": 0.01980264188017022
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.32589285714285715,
"acc_norm_stderr,none": 0.02216910313464343
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.34750462107208874,
"prompt_level_strict_acc_stderr,none": 0.020491423653415698,
"inst_level_strict_acc,none": 0.49280575539568344,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.3844731977818854,
"prompt_level_loose_acc_stderr,none": 0.02093435763458476,
"inst_level_loose_acc,none": 0.5275779376498801,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.26963746223564955,
"exact_match_stderr,none": 0.011184954631980942,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.5179153094462541,
"exact_match_stderr,none": 0.02856474366553855
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.17073170731707318,
"exact_match_stderr,none": 0.034066279591320504
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.14393939393939395,
"exact_match_stderr,none": 0.030669482160081984
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.09642857142857143,
"exact_match_stderr,none": 0.017671849720607317
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.18181818181818182,
"exact_match_stderr,none": 0.031181560935001604
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.46113989637305697,
"exact_match_stderr,none": 0.03597524411734576
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.1037037037037037,
"exact_match_stderr,none": 0.02633725661744443
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.42802526595744683,
"acc_stderr,none": 0.00451099413054819
},
"leaderboard_musr": {
"acc_norm,none": 0.48412698412698413,
"acc_norm_stderr,none": 0.018000464125634077,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.58,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.39453125,
"acc_norm_stderr,none": 0.030606698150250366
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.48,
"acc_norm_stderr,none": 0.03166085340849512
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases, and limitations of the dataset. More information is needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/newsbang__Homer-v0.3-Qwen2.5-7B-details | open-llm-leaderboard | "2024-11-19T23:46:50Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T23:43:33Z" | ---
pretty_name: Evaluation run of newsbang/Homer-v0.3-Qwen2.5-7B
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [newsbang/Homer-v0.3-Qwen2.5-7B](https://huggingface.co/newsbang/Homer-v0.3-Qwen2.5-7B)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/newsbang__Homer-v0.3-Qwen2.5-7B-details\"\
,\n\tname=\"newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-11-19T23-43-32.799534](https://huggingface.co/datasets/open-llm-leaderboard/newsbang__Homer-v0.3-Qwen2.5-7B-details/blob/main/newsbang__Homer-v0.3-Qwen2.5-7B/results_2024-11-19T23-43-32.799534.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"acc_norm,none\": 0.5047347256453496,\n \"acc_norm_stderr,none\"\
: 0.005337291918918776,\n \"exact_match,none\": 0.29531722054380666,\n\
\ \"exact_match_stderr,none\": 0.011288626902602862,\n \"\
inst_level_loose_acc,none\": 0.60431654676259,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"acc,none\": 0.445561835106383,\n \"acc_stderr,none\"\
: 0.0045313716925371285,\n \"prompt_level_loose_acc,none\": 0.4824399260628466,\n\
\ \"prompt_level_loose_acc_stderr,none\": 0.02150330051338897,\n \
\ \"prompt_level_strict_acc,none\": 0.45286506469500926,\n \"\
prompt_level_strict_acc_stderr,none\": 0.02142075394952955,\n \"inst_level_strict_acc,none\"\
: 0.5779376498800959,\n \"inst_level_strict_acc_stderr,none\": \"N/A\"\
,\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.5441763582711335,\n \"acc_norm_stderr,none\"\
: 0.006121813074595964,\n \"alias\": \" - leaderboard_bbh\"\n \
\ },\n \"leaderboard_bbh_boolean_expressions\": {\n \"alias\"\
: \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\": 0.856,\n\
\ \"acc_norm_stderr,none\": 0.022249407735450245\n },\n \
\ \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.6042780748663101,\n \"acc_norm_stderr,none\"\
: 0.035855600715925424\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_date_understanding\",\n \
\ \"acc_norm,none\": 0.576,\n \"acc_norm_stderr,none\": 0.03131803437491622\n\
\ },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\": 0.66,\n\
\ \"acc_norm_stderr,none\": 0.030020073605457876\n },\n \
\ \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.612,\n \"acc_norm_stderr,none\":\
\ 0.030881038748993974\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\",\n \
\ \"acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \"\
\ - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\": 0.58,\n \
\ \"acc_norm_stderr,none\": 0.03127799950463661\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
,\n \"acc_norm,none\": 0.544,\n \"acc_norm_stderr,none\":\
\ 0.031563285061213475\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.496,\n \"acc_norm_stderr,none\":\
\ 0.0316851985511992\n },\n \"leaderboard_bbh_logical_deduction_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\
,\n \"acc_norm,none\": 0.8,\n \"acc_norm_stderr,none\": 0.02534897002097912\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"acc_norm,none\"\
: 0.644,\n \"acc_norm_stderr,none\": 0.0303436806571532\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.672,\n \"acc_norm_stderr,none\":\
\ 0.029752391824475363\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"alias\": \" - leaderboard_bbh_object_counting\",\n \
\ \"acc_norm,none\": 0.396,\n \"acc_norm_stderr,none\": 0.030993197854577898\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
alias\": \" - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\"\
: 0.6301369863013698,\n \"acc_norm_stderr,none\": 0.04009165058801775\n\
\ },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\",\n\
\ \"acc_norm,none\": 0.636,\n \"acc_norm_stderr,none\": 0.030491555220405475\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\": 0.568,\n \
\ \"acc_norm_stderr,none\": 0.03139181076542941\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\
,\n \"acc_norm,none\": 0.524,\n \"acc_norm_stderr,none\":\
\ 0.03164968895968774\n },\n \"leaderboard_bbh_snarks\": {\n \
\ \"alias\": \" - leaderboard_bbh_snarks\",\n \"acc_norm,none\"\
: 0.6910112359550562,\n \"acc_norm_stderr,none\": 0.0347317978779636\n\
\ },\n \"leaderboard_bbh_sports_understanding\": {\n \"\
alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.736,\n \"acc_norm_stderr,none\": 0.027934518957690866\n },\n\
\ \"leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" -\
\ leaderboard_bbh_temporal_sequences\",\n \"acc_norm,none\": 0.368,\n\
\ \"acc_norm_stderr,none\": 0.03056207062099311\n },\n \
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \"\
alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\",\n \
\ \"acc_norm,none\": 0.14,\n \"acc_norm_stderr,none\": 0.021989409645240245\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.152,\n \"acc_norm_stderr,none\":\
\ 0.022752024491765464\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.212,\n \"acc_norm_stderr,none\":\
\ 0.025901884690541117\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.54,\n \"acc_norm_stderr,none\": 0.031584653891499004\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.3338926174496644,\n\
\ \"acc_norm_stderr,none\": 0.013673387195954708,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.35353535353535354,\n \"acc_norm_stderr,none\": 0.03406086723547151\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.3241758241758242,\n\
\ \"acc_norm_stderr,none\": 0.020049748182808566\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.33705357142857145,\n \"acc_norm_stderr,none\"\
: 0.02235810146577637\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.45286506469500926,\n \"prompt_level_strict_acc_stderr,none\": 0.02142075394952955,\n\
\ \"inst_level_strict_acc,none\": 0.5779376498800959,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.4824399260628466,\n \"prompt_level_loose_acc_stderr,none\": 0.02150330051338897,\n\
\ \"inst_level_loose_acc,none\": 0.60431654676259,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\"\n },\n \"leaderboard_math_hard\": {\n \"exact_match,none\"\
: 0.29531722054380666,\n \"exact_match_stderr,none\": 0.011288626902602862,\n\
\ \"alias\": \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_algebra_hard\",\n \
\ \"exact_match,none\": 0.5700325732899023,\n \"exact_match_stderr,none\"\
: 0.02830133364131638\n },\n \"leaderboard_math_counting_and_prob_hard\"\
: {\n \"alias\": \" - leaderboard_math_counting_and_prob_hard\",\n \
\ \"exact_match,none\": 0.17073170731707318,\n \"exact_match_stderr,none\"\
: 0.034066279591320504\n },\n \"leaderboard_math_geometry_hard\":\
\ {\n \"alias\": \" - leaderboard_math_geometry_hard\",\n \
\ \"exact_match,none\": 0.1590909090909091,\n \"exact_match_stderr,none\"\
: 0.03195667292673137\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n\
\ \"exact_match,none\": 0.08928571428571429,\n \"exact_match_stderr,none\"\
: 0.01707182544577207\n },\n \"leaderboard_math_num_theory_hard\"\
: {\n \"alias\": \" - leaderboard_math_num_theory_hard\",\n \
\ \"exact_match,none\": 0.2532467532467532,\n \"exact_match_stderr,none\"\
: 0.035157241113655854\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_prealgebra_hard\",\n \
\ \"exact_match,none\": 0.5025906735751295,\n \"exact_match_stderr,none\"\
: 0.03608390745384487\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.0962962962962963,\n \"exact_match_stderr,none\"\
: 0.02548387251435117\n },\n \"leaderboard_mmlu_pro\": {\n \
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.445561835106383,\n\
\ \"acc_stderr,none\": 0.0045313716925371285\n },\n \"\
leaderboard_musr\": {\n \"acc_norm,none\": 0.47354497354497355,\n \
\ \"acc_norm_stderr,none\": 0.017916034877238786,\n \"alias\"\
: \" - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\"\
: {\n \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \
\ \"acc_norm,none\": 0.584,\n \"acc_norm_stderr,none\": 0.031235856237014505\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\"\
: \" - leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.37109375,\n\
\ \"acc_norm_stderr,none\": 0.03025273792250212\n },\n \
\ \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.468,\n \"acc_norm_stderr,none\":\
\ 0.03162125257572558\n }\n },\n \"leaderboard\": {\n \"acc_norm,none\"\
: 0.5047347256453496,\n \"acc_norm_stderr,none\": 0.005337291918918776,\n\
\ \"exact_match,none\": 0.29531722054380666,\n \"exact_match_stderr,none\"\
: 0.011288626902602862,\n \"inst_level_loose_acc,none\": 0.60431654676259,\n\
\ \"inst_level_loose_acc_stderr,none\": \"N/A\",\n \"acc,none\": 0.445561835106383,\n\
\ \"acc_stderr,none\": 0.0045313716925371285,\n \"prompt_level_loose_acc,none\"\
: 0.4824399260628466,\n \"prompt_level_loose_acc_stderr,none\": 0.02150330051338897,\n\
\ \"prompt_level_strict_acc,none\": 0.45286506469500926,\n \"prompt_level_strict_acc_stderr,none\"\
: 0.02142075394952955,\n \"inst_level_strict_acc,none\": 0.5779376498800959,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"alias\": \"\
leaderboard\"\n },\n \"leaderboard_bbh\": {\n \"acc_norm,none\": 0.5441763582711335,\n\
\ \"acc_norm_stderr,none\": 0.006121813074595964,\n \"alias\": \"\
\ - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\": {\n\
\ \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\"\
: 0.856,\n \"acc_norm_stderr,none\": 0.022249407735450245\n },\n \"\
leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.6042780748663101,\n \"acc_norm_stderr,none\"\
: 0.035855600715925424\n },\n \"leaderboard_bbh_date_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.576,\n \"acc_norm_stderr,none\": 0.03131803437491622\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.66,\n \"acc_norm_stderr,none\": 0.030020073605457876\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.612,\n \"acc_norm_stderr,none\": 0.030881038748993974\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.58,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.544,\n \"acc_norm_stderr,none\": 0.031563285061213475\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.496,\n \"acc_norm_stderr,none\": 0.0316851985511992\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.8,\n \"acc_norm_stderr,none\": 0.02534897002097912\n },\n \"leaderboard_bbh_movie_recommendation\"\
: {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"\
acc_norm,none\": 0.644,\n \"acc_norm_stderr,none\": 0.0303436806571532\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.672,\n \"acc_norm_stderr,none\": 0.029752391824475363\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.396,\n \"acc_norm_stderr,none\": 0.030993197854577898\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.6301369863013698,\n\
\ \"acc_norm_stderr,none\": 0.04009165058801775\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.636,\n \"acc_norm_stderr,none\": 0.030491555220405475\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.568,\n \"acc_norm_stderr,none\": 0.03139181076542941\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.524,\n \"acc_norm_stderr,none\": 0.03164968895968774\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.6910112359550562,\n \"acc_norm_stderr,none\"\
: 0.0347317978779636\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.736,\n \"acc_norm_stderr,none\": 0.027934518957690866\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.368,\n \"acc_norm_stderr,none\": 0.03056207062099311\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.14,\n \"acc_norm_stderr,none\": 0.021989409645240245\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.152,\n \"acc_norm_stderr,none\": 0.022752024491765464\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.212,\n \"acc_norm_stderr,none\": 0.025901884690541117\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.54,\n \"acc_norm_stderr,none\": 0.031584653891499004\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.3338926174496644,\n\
\ \"acc_norm_stderr,none\": 0.013673387195954708,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.35353535353535354,\n\
\ \"acc_norm_stderr,none\": 0.03406086723547151\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.3241758241758242,\n \"acc_norm_stderr,none\": 0.020049748182808566\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.33705357142857145,\n \"acc_norm_stderr,none\"\
: 0.02235810146577637\n },\n \"leaderboard_ifeval\": {\n \"alias\"\
: \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.45286506469500926,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.02142075394952955,\n \
\ \"inst_level_strict_acc,none\": 0.5779376498800959,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.4824399260628466,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.02150330051338897,\n \"inst_level_loose_acc,none\"\
: 0.60431654676259,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n \
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.29531722054380666,\n\
\ \"exact_match_stderr,none\": 0.011288626902602862,\n \"alias\":\
\ \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\": {\n\
\ \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.5700325732899023,\n \"exact_match_stderr,none\": 0.02830133364131638\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.17073170731707318,\n \"exact_match_stderr,none\": 0.034066279591320504\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\": \" - leaderboard_math_geometry_hard\"\
,\n \"exact_match,none\": 0.1590909090909091,\n \"exact_match_stderr,none\"\
: 0.03195667292673137\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.08928571428571429,\n \"exact_match_stderr,none\"\
: 0.01707182544577207\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.2532467532467532,\n \"exact_match_stderr,none\": 0.035157241113655854\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.5025906735751295,\n \"exact_match_stderr,none\"\
: 0.03608390745384487\n },\n \"leaderboard_math_precalculus_hard\": {\n \
\ \"alias\": \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\"\
: 0.0962962962962963,\n \"exact_match_stderr,none\": 0.02548387251435117\n\
\ },\n \"leaderboard_mmlu_pro\": {\n \"alias\": \" - leaderboard_mmlu_pro\"\
,\n \"acc,none\": 0.445561835106383,\n \"acc_stderr,none\": 0.0045313716925371285\n\
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.47354497354497355,\n\
\ \"acc_norm_stderr,none\": 0.017916034877238786,\n \"alias\": \"\
\ - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\": {\n \
\ \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\"\
: 0.584,\n \"acc_norm_stderr,none\": 0.031235856237014505\n },\n \"\
leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.37109375,\n \"acc_norm_stderr,none\": 0.03025273792250212\n\
\ },\n \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.468,\n \"acc_norm_stderr,none\": 0.03162125257572558\n\
\ }\n}\n```"
repo_url: https://huggingface.co/newsbang/Homer-v0.3-Qwen2.5-7B
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_bbh_navigate
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_bbh_snarks
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_gpqa_extended
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_gpqa_main
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_gpqa_main_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_ifeval
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_ifeval_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_mmlu_pro
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_musr_object_placements
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-19T23-43-32.799534.jsonl'
- config_name: newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_19T23_43_32.799534
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-19T23-43-32.799534.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-19T23-43-32.799534.jsonl'
---
# Dataset Card for Evaluation run of newsbang/Homer-v0.3-Qwen2.5-7B
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [newsbang/Homer-v0.3-Qwen2.5-7B](https://huggingface.co/newsbang/Homer-v0.3-Qwen2.5-7B).
The dataset is composed of 38 configuration(s), each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/newsbang__Homer-v0.3-Qwen2.5-7B-details",
name="newsbang__Homer-v0.3-Qwen2.5-7B__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
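To see which of the 38 task configurations and which timestamped splits are available before loading anything, the `datasets` library provides helper functions for this. A minimal sketch — only the repository name is taken from this card; the helpers are standard `datasets` utilities:
```python
from datasets import get_dataset_config_names, get_dataset_split_names

repo = "open-llm-leaderboard/newsbang__Homer-v0.3-Qwen2.5-7B-details"

# List the per-task configurations available in this details repository.
configs = get_dataset_config_names(repo)
print(len(configs), "configurations")

# Each configuration exposes one split per run timestamp plus a "latest" alias.
print(get_dataset_split_names(repo, configs[0]))
```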
## Latest results
These are the [latest results from run 2024-11-19T23-43-32.799534](https://huggingface.co/datasets/open-llm-leaderboard/newsbang__Homer-v0.3-Qwen2.5-7B-details/blob/main/newsbang__Homer-v0.3-Qwen2.5-7B/results_2024-11-19T23-43-32.799534.json) (note that the repository may contain results for other tasks if successive evaluations did not cover the same tasks; each one is available in its own results file and in the "latest" split of the corresponding configuration):
```python
{
"all": {
"leaderboard": {
"acc_norm,none": 0.5047347256453496,
"acc_norm_stderr,none": 0.005337291918918776,
"exact_match,none": 0.29531722054380666,
"exact_match_stderr,none": 0.011288626902602862,
"inst_level_loose_acc,none": 0.60431654676259,
"inst_level_loose_acc_stderr,none": "N/A",
"acc,none": 0.445561835106383,
"acc_stderr,none": 0.0045313716925371285,
"prompt_level_loose_acc,none": 0.4824399260628466,
"prompt_level_loose_acc_stderr,none": 0.02150330051338897,
"prompt_level_strict_acc,none": 0.45286506469500926,
"prompt_level_strict_acc_stderr,none": 0.02142075394952955,
"inst_level_strict_acc,none": 0.5779376498800959,
"inst_level_strict_acc_stderr,none": "N/A",
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.5441763582711335,
"acc_norm_stderr,none": 0.006121813074595964,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.856,
"acc_norm_stderr,none": 0.022249407735450245
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.6042780748663101,
"acc_norm_stderr,none": 0.035855600715925424
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.576,
"acc_norm_stderr,none": 0.03131803437491622
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.66,
"acc_norm_stderr,none": 0.030020073605457876
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.612,
"acc_norm_stderr,none": 0.030881038748993974
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.58,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.496,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.8,
"acc_norm_stderr,none": 0.02534897002097912
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.644,
"acc_norm_stderr,none": 0.0303436806571532
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.672,
"acc_norm_stderr,none": 0.029752391824475363
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.396,
"acc_norm_stderr,none": 0.030993197854577898
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.6301369863013698,
"acc_norm_stderr,none": 0.04009165058801775
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.636,
"acc_norm_stderr,none": 0.030491555220405475
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.568,
"acc_norm_stderr,none": 0.03139181076542941
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.524,
"acc_norm_stderr,none": 0.03164968895968774
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.6910112359550562,
"acc_norm_stderr,none": 0.0347317978779636
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.736,
"acc_norm_stderr,none": 0.027934518957690866
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.368,
"acc_norm_stderr,none": 0.03056207062099311
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.14,
"acc_norm_stderr,none": 0.021989409645240245
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.152,
"acc_norm_stderr,none": 0.022752024491765464
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.212,
"acc_norm_stderr,none": 0.025901884690541117
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.54,
"acc_norm_stderr,none": 0.031584653891499004
},
"leaderboard_gpqa": {
"acc_norm,none": 0.3338926174496644,
"acc_norm_stderr,none": 0.013673387195954708,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.35353535353535354,
"acc_norm_stderr,none": 0.03406086723547151
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.3241758241758242,
"acc_norm_stderr,none": 0.020049748182808566
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.33705357142857145,
"acc_norm_stderr,none": 0.02235810146577637
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.45286506469500926,
"prompt_level_strict_acc_stderr,none": 0.02142075394952955,
"inst_level_strict_acc,none": 0.5779376498800959,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.4824399260628466,
"prompt_level_loose_acc_stderr,none": 0.02150330051338897,
"inst_level_loose_acc,none": 0.60431654676259,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.29531722054380666,
"exact_match_stderr,none": 0.011288626902602862,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.5700325732899023,
"exact_match_stderr,none": 0.02830133364131638
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.17073170731707318,
"exact_match_stderr,none": 0.034066279591320504
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.1590909090909091,
"exact_match_stderr,none": 0.03195667292673137
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.08928571428571429,
"exact_match_stderr,none": 0.01707182544577207
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.2532467532467532,
"exact_match_stderr,none": 0.035157241113655854
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.5025906735751295,
"exact_match_stderr,none": 0.03608390745384487
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0962962962962963,
"exact_match_stderr,none": 0.02548387251435117
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.445561835106383,
"acc_stderr,none": 0.0045313716925371285
},
"leaderboard_musr": {
"acc_norm,none": 0.47354497354497355,
"acc_norm_stderr,none": 0.017916034877238786,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.584,
"acc_norm_stderr,none": 0.031235856237014505
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.37109375,
"acc_norm_stderr,none": 0.03025273792250212
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.468,
"acc_norm_stderr,none": 0.03162125257572558
}
},
"leaderboard": {
"acc_norm,none": 0.5047347256453496,
"acc_norm_stderr,none": 0.005337291918918776,
"exact_match,none": 0.29531722054380666,
"exact_match_stderr,none": 0.011288626902602862,
"inst_level_loose_acc,none": 0.60431654676259,
"inst_level_loose_acc_stderr,none": "N/A",
"acc,none": 0.445561835106383,
"acc_stderr,none": 0.0045313716925371285,
"prompt_level_loose_acc,none": 0.4824399260628466,
"prompt_level_loose_acc_stderr,none": 0.02150330051338897,
"prompt_level_strict_acc,none": 0.45286506469500926,
"prompt_level_strict_acc_stderr,none": 0.02142075394952955,
"inst_level_strict_acc,none": 0.5779376498800959,
"inst_level_strict_acc_stderr,none": "N/A",
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.5441763582711335,
"acc_norm_stderr,none": 0.006121813074595964,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.856,
"acc_norm_stderr,none": 0.022249407735450245
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.6042780748663101,
"acc_norm_stderr,none": 0.035855600715925424
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.576,
"acc_norm_stderr,none": 0.03131803437491622
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.66,
"acc_norm_stderr,none": 0.030020073605457876
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.612,
"acc_norm_stderr,none": 0.030881038748993974
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.58,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.496,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.8,
"acc_norm_stderr,none": 0.02534897002097912
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.644,
"acc_norm_stderr,none": 0.0303436806571532
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.672,
"acc_norm_stderr,none": 0.029752391824475363
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.396,
"acc_norm_stderr,none": 0.030993197854577898
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.6301369863013698,
"acc_norm_stderr,none": 0.04009165058801775
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.636,
"acc_norm_stderr,none": 0.030491555220405475
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.568,
"acc_norm_stderr,none": 0.03139181076542941
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.524,
"acc_norm_stderr,none": 0.03164968895968774
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.6910112359550562,
"acc_norm_stderr,none": 0.0347317978779636
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.736,
"acc_norm_stderr,none": 0.027934518957690866
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.368,
"acc_norm_stderr,none": 0.03056207062099311
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.14,
"acc_norm_stderr,none": 0.021989409645240245
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.152,
"acc_norm_stderr,none": 0.022752024491765464
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.212,
"acc_norm_stderr,none": 0.025901884690541117
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.54,
"acc_norm_stderr,none": 0.031584653891499004
},
"leaderboard_gpqa": {
"acc_norm,none": 0.3338926174496644,
"acc_norm_stderr,none": 0.013673387195954708,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.35353535353535354,
"acc_norm_stderr,none": 0.03406086723547151
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.3241758241758242,
"acc_norm_stderr,none": 0.020049748182808566
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.33705357142857145,
"acc_norm_stderr,none": 0.02235810146577637
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.45286506469500926,
"prompt_level_strict_acc_stderr,none": 0.02142075394952955,
"inst_level_strict_acc,none": 0.5779376498800959,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.4824399260628466,
"prompt_level_loose_acc_stderr,none": 0.02150330051338897,
"inst_level_loose_acc,none": 0.60431654676259,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.29531722054380666,
"exact_match_stderr,none": 0.011288626902602862,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.5700325732899023,
"exact_match_stderr,none": 0.02830133364131638
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.17073170731707318,
"exact_match_stderr,none": 0.034066279591320504
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.1590909090909091,
"exact_match_stderr,none": 0.03195667292673137
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.08928571428571429,
"exact_match_stderr,none": 0.01707182544577207
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.2532467532467532,
"exact_match_stderr,none": 0.035157241113655854
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.5025906735751295,
"exact_match_stderr,none": 0.03608390745384487
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0962962962962963,
"exact_match_stderr,none": 0.02548387251435117
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.445561835106383,
"acc_stderr,none": 0.0045313716925371285
},
"leaderboard_musr": {
"acc_norm,none": 0.47354497354497355,
"acc_norm_stderr,none": 0.017916034877238786,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.584,
"acc_norm_stderr,none": 0.031235856237014505
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.37109375,
"acc_norm_stderr,none": 0.03025273792250212
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.468,
"acc_norm_stderr,none": 0.03162125257572558
}
}
```
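The aggregated results shown above can also be fetched as a raw JSON file instead of going through the `datasets` loader. A minimal sketch using `huggingface_hub` — the repository id and file path are the ones linked in the "Latest results" section, and `huggingface_hub` is assumed to be installed:
```python
import json

from huggingface_hub import hf_hub_download

# Download the aggregated results file linked in the "Latest results" section.
path = hf_hub_download(
    repo_id="open-llm-leaderboard/newsbang__Homer-v0.3-Qwen2.5-7B-details",
    filename="newsbang__Homer-v0.3-Qwen2.5-7B/results_2024-11-19T23-43-32.799534.json",
    repo_type="dataset",
)

with open(path) as f:
    results = json.load(f)

# Inspect the top-level structure of the results file.
print(list(results.keys()))
```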
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases, and limitations of the dataset. More information is needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT-details | open-llm-leaderboard | "2024-11-20T00:01:33Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T23:58:31Z" | ---
pretty_name: Evaluation run of nbeerbower/Mistral-Gutenberg-Doppel-7B-FFT
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [nbeerbower/Mistral-Gutenberg-Doppel-7B-FFT](https://huggingface.co/nbeerbower/Mistral-Gutenberg-Doppel-7B-FFT)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT-details\"\
,\n\tname=\"nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-11-19T23-58-30.543865](https://huggingface.co/datasets/open-llm-leaderboard/nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT-details/blob/main/nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT/results_2024-11-19T23-58-30.543865.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"exact_match,none\": 0.024924471299093656,\n \"exact_match_stderr,none\"\
: 0.004246719185859959,\n \"prompt_level_loose_acc,none\": 0.5545286506469501,\n\
\ \"prompt_level_loose_acc_stderr,none\": 0.02138823777906319,\n \
\ \"acc,none\": 0.2728557180851064,\n \"acc_stderr,none\": 0.004060934297107003,\n\
\ \"inst_level_loose_acc,none\": 0.6678657074340527,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\",\n \"prompt_level_strict_acc,none\"\
: 0.5138632162661737,\n \"prompt_level_strict_acc_stderr,none\": 0.0215083020678561,\n\
\ \"inst_level_strict_acc,none\": 0.6294964028776978,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"acc_norm,none\": 0.3877286288753405,\n\
\ \"acc_norm_stderr,none\": 0.0052581971394649075,\n \"alias\"\
: \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \"acc_norm,none\"\
: 0.40704738760631837,\n \"acc_norm_stderr,none\": 0.0060823370750310475,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \
\ \"acc_norm,none\": 0.784,\n \"acc_norm_stderr,none\": 0.02607865766373279\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\"\
: \" - leaderboard_bbh_causal_judgement\",\n \"acc_norm,none\": 0.5721925133689839,\n\
\ \"acc_norm_stderr,none\": 0.03627762136497335\n },\n \
\ \"leaderboard_bbh_date_understanding\": {\n \"alias\": \" - leaderboard_bbh_date_understanding\"\
,\n \"acc_norm,none\": 0.304,\n \"acc_norm_stderr,none\":\
\ 0.02915021337415965\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\",\n \
\ \"acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\"\
: \" - leaderboard_bbh_formal_fallacies\",\n \"acc_norm,none\": 0.48,\n\
\ \"acc_norm_stderr,none\": 0.03166085340849512\n },\n \
\ \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.312,\n \"acc_norm_stderr,none\":\
\ 0.02936106757521985\n },\n \"leaderboard_bbh_hyperbaton\": {\n \
\ \"alias\": \" - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\"\
: 0.576,\n \"acc_norm_stderr,none\": 0.03131803437491622\n },\n\
\ \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.236,\n \"acc_norm_stderr,none\": 0.026909337594953852\n },\n\
\ \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\",\n \"\
acc_norm,none\": 0.22,\n \"acc_norm_stderr,none\": 0.026251792824605793\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n\
\ \"acc_norm,none\": 0.428,\n \"acc_norm_stderr,none\": 0.031355968923772626\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"acc_norm,none\"\
: 0.452,\n \"acc_norm_stderr,none\": 0.03153986449255664\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.46,\n \"acc_norm_stderr,none\": 0.031584653891499004\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\"\
: \" - leaderboard_bbh_object_counting\",\n \"acc_norm,none\": 0.388,\n\
\ \"acc_norm_stderr,none\": 0.030881038748993974\n },\n \
\ \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\
,\n \"acc_norm,none\": 0.2876712328767123,\n \"acc_norm_stderr,none\"\
: 0.037592781281728736\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.328,\n \"acc_norm_stderr,none\":\
\ 0.029752391824475363\n },\n \"leaderboard_bbh_ruin_names\": {\n\
\ \"alias\": \" - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\"\
: 0.42,\n \"acc_norm_stderr,none\": 0.03127799950463661\n },\n\
\ \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.32,\n \"acc_norm_stderr,none\": 0.029561724955240978\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" -\
\ leaderboard_bbh_snarks\",\n \"acc_norm,none\": 0.48314606741573035,\n\
\ \"acc_norm_stderr,none\": 0.037560944447344834\n },\n \
\ \"leaderboard_bbh_sports_understanding\": {\n \"alias\": \" - leaderboard_bbh_sports_understanding\"\
,\n \"acc_norm,none\": 0.832,\n \"acc_norm_stderr,none\":\
\ 0.023692813205492536\n },\n \"leaderboard_bbh_temporal_sequences\"\
: {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\",\n \
\ \"acc_norm,none\": 0.32,\n \"acc_norm_stderr,none\": 0.029561724955240978\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.184,\n \"acc_norm_stderr,none\":\
\ 0.02455581299422255\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.12,\n \"acc_norm_stderr,none\": 0.020593600596839998\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.268,\n \"acc_norm_stderr,none\":\
\ 0.02806876238252672\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.492,\n \"acc_norm_stderr,none\": 0.03168215643141386\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2835570469798658,\n\
\ \"acc_norm_stderr,none\": 0.013061700919420478,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.2727272727272727,\n \"acc_norm_stderr,none\": 0.03173071239071728\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.3021978021978022,\n\
\ \"acc_norm_stderr,none\": 0.019670416969439074\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.265625,\n \"acc_norm_stderr,none\"\
: 0.02089005840079951\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.5138632162661737,\n \"prompt_level_strict_acc_stderr,none\": 0.0215083020678561,\n\
\ \"inst_level_strict_acc,none\": 0.6294964028776978,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.5545286506469501,\n \"prompt_level_loose_acc_stderr,none\": 0.02138823777906319,\n\
\ \"inst_level_loose_acc,none\": 0.6678657074340527,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.024924471299093656,\n \"exact_match_stderr,none\"\
: 0.004246719185859959,\n \"alias\": \" - leaderboard_math_hard\"\n \
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.03908794788273615,\n\
\ \"exact_match_stderr,none\": 0.01107905050865708\n },\n \
\ \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\": \" \
\ - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.016260162601626018,\n \"exact_match_stderr,none\": 0.011450452676925654\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\"\
: \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\": 0.007575757575757576,\n\
\ \"exact_match_stderr,none\": 0.007575757575757577\n },\n \
\ \"leaderboard_math_intermediate_algebra_hard\": {\n \"alias\":\
\ \" - leaderboard_math_intermediate_algebra_hard\",\n \"exact_match,none\"\
: 0.007142857142857143,\n \"exact_match_stderr,none\": 0.005041703051390571\n\
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\"\
: \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\": 0.006493506493506494,\n\
\ \"exact_match_stderr,none\": 0.006493506493506494\n },\n \
\ \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.07253886010362694,\n \"exact_match_stderr,none\"\
: 0.018718998520678213\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.007407407407407408,\n \"exact_match_stderr,none\"\
: 0.007407407407407408\n },\n \"leaderboard_mmlu_pro\": {\n \
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.2728557180851064,\n\
\ \"acc_stderr,none\": 0.004060934297107003\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.40476190476190477,\n \"acc_norm_stderr,none\"\
: 0.01739317377016208,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"alias\": \"\
\ - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\": 0.536,\n\
\ \"acc_norm_stderr,none\": 0.031603975145223735\n },\n \
\ \"leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.2578125,\n \"acc_norm_stderr,none\"\
: 0.027392944192695272\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \
\ \"acc_norm,none\": 0.424,\n \"acc_norm_stderr,none\": 0.03131803437491622\n\
\ }\n },\n \"leaderboard\": {\n \"exact_match,none\": 0.024924471299093656,\n\
\ \"exact_match_stderr,none\": 0.004246719185859959,\n \"prompt_level_loose_acc,none\"\
: 0.5545286506469501,\n \"prompt_level_loose_acc_stderr,none\": 0.02138823777906319,\n\
\ \"acc,none\": 0.2728557180851064,\n \"acc_stderr,none\": 0.004060934297107003,\n\
\ \"inst_level_loose_acc,none\": 0.6678657074340527,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_strict_acc,none\": 0.5138632162661737,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.0215083020678561,\n \"inst_level_strict_acc,none\"\
: 0.6294964028776978,\n \"inst_level_strict_acc_stderr,none\": \"N/A\",\n\
\ \"acc_norm,none\": 0.3877286288753405,\n \"acc_norm_stderr,none\"\
: 0.0052581971394649075,\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.40704738760631837,\n \"acc_norm_stderr,none\"\
: 0.0060823370750310475,\n \"alias\": \" - leaderboard_bbh\"\n },\n \
\ \"leaderboard_bbh_boolean_expressions\": {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\"\
,\n \"acc_norm,none\": 0.784,\n \"acc_norm_stderr,none\": 0.02607865766373279\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.5721925133689839,\n \"acc_norm_stderr,none\"\
: 0.03627762136497335\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.304,\n \"acc_norm_stderr,none\": 0.02915021337415965\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.48,\n \"acc_norm_stderr,none\": 0.03166085340849512\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.312,\n \"acc_norm_stderr,none\": 0.02936106757521985\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.576,\n \"acc_norm_stderr,none\": 0.03131803437491622\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.236,\n \"acc_norm_stderr,none\": 0.026909337594953852\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.22,\n \"acc_norm_stderr,none\": 0.026251792824605793\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.428,\n \"acc_norm_stderr,none\": 0.031355968923772626\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.452,\n \"acc_norm_stderr,none\": 0.03153986449255664\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.46,\n \"acc_norm_stderr,none\": 0.031584653891499004\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.388,\n \"acc_norm_stderr,none\": 0.030881038748993974\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.2876712328767123,\n\
\ \"acc_norm_stderr,none\": 0.037592781281728736\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.328,\n \"acc_norm_stderr,none\": 0.029752391824475363\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.42,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.32,\n \"acc_norm_stderr,none\": 0.029561724955240978\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.48314606741573035,\n \"acc_norm_stderr,none\"\
: 0.037560944447344834\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.832,\n \"acc_norm_stderr,none\": 0.023692813205492536\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.32,\n \"acc_norm_stderr,none\": 0.029561724955240978\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.184,\n \"acc_norm_stderr,none\": 0.02455581299422255\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.12,\n \"acc_norm_stderr,none\": 0.020593600596839998\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.268,\n \"acc_norm_stderr,none\": 0.02806876238252672\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.492,\n \"acc_norm_stderr,none\": 0.03168215643141386\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2835570469798658,\n\
\ \"acc_norm_stderr,none\": 0.013061700919420478,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.2727272727272727,\n\
\ \"acc_norm_stderr,none\": 0.03173071239071728\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.3021978021978022,\n \"acc_norm_stderr,none\": 0.019670416969439074\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.265625,\n \"acc_norm_stderr,none\": 0.02089005840079951\n\
\ },\n \"leaderboard_ifeval\": {\n \"alias\": \" - leaderboard_ifeval\"\
,\n \"prompt_level_strict_acc,none\": 0.5138632162661737,\n \"prompt_level_strict_acc_stderr,none\"\
: 0.0215083020678561,\n \"inst_level_strict_acc,none\": 0.6294964028776978,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.5545286506469501,\n \"prompt_level_loose_acc_stderr,none\": 0.02138823777906319,\n\
\ \"inst_level_loose_acc,none\": 0.6678657074340527,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\"\n },\n \"leaderboard_math_hard\": {\n \"exact_match,none\"\
: 0.024924471299093656,\n \"exact_match_stderr,none\": 0.004246719185859959,\n\
\ \"alias\": \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.03908794788273615,\n \"exact_match_stderr,none\": 0.01107905050865708\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.016260162601626018,\n \"exact_match_stderr,none\": 0.011450452676925654\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\": \" - leaderboard_math_geometry_hard\"\
,\n \"exact_match,none\": 0.007575757575757576,\n \"exact_match_stderr,none\"\
: 0.007575757575757577\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.007142857142857143,\n \"exact_match_stderr,none\"\
: 0.005041703051390571\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.006493506493506494,\n \"exact_match_stderr,none\": 0.006493506493506494\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.07253886010362694,\n \"exact_match_stderr,none\"\
: 0.018718998520678213\n },\n \"leaderboard_math_precalculus_hard\": {\n \
\ \"alias\": \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\"\
: 0.007407407407407408,\n \"exact_match_stderr,none\": 0.007407407407407408\n\
\ },\n \"leaderboard_mmlu_pro\": {\n \"alias\": \" - leaderboard_mmlu_pro\"\
,\n \"acc,none\": 0.2728557180851064,\n \"acc_stderr,none\": 0.004060934297107003\n\
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.40476190476190477,\n\
\ \"acc_norm_stderr,none\": 0.01739317377016208,\n \"alias\": \" -\
\ leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\": {\n \
\ \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\"\
: 0.536,\n \"acc_norm_stderr,none\": 0.031603975145223735\n },\n \"\
leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.2578125,\n \"acc_norm_stderr,none\": 0.027392944192695272\n\
\ },\n \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.424,\n \"acc_norm_stderr,none\": 0.03131803437491622\n\
\ }\n}\n```"
repo_url: https://huggingface.co/nbeerbower/Mistral-Gutenberg-Doppel-7B-FFT
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_bbh_navigate
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_bbh_snarks
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_gpqa_extended
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_gpqa_main
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_gpqa_main_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_ifeval
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_ifeval_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_mmlu_pro
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_musr_object_placements
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-19T23-58-30.543865.jsonl'
- config_name: nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_19T23_58_30.543865
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-19T23-58-30.543865.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-19T23-58-30.543865.jsonl'
---
# Dataset Card for Evaluation run of nbeerbower/Mistral-Gutenberg-Doppel-7B-FFT
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [nbeerbower/Mistral-Gutenberg-Doppel-7B-FFT](https://huggingface.co/nbeerbower/Mistral-Gutenberg-Doppel-7B-FFT)
The dataset is composed of 38 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can, for instance, do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT-details",
name="nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
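Because the repository exposes 38 separate configurations, it can be convenient to enumerate them instead of typing names by hand. A minimal sketch using the `datasets` helpers (it assumes network access to the Hub and simply illustrates one way to iterate the per-task configs):

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT-details"

# Enumerate every configuration (one per evaluated task) in this details repo.
configs = get_dataset_config_names(repo)
print(len(configs), "configurations found")

# Load the latest samples for a single task picked from the list.
bbh_configs = [name for name in configs if "leaderboard_bbh_" in name]
data = load_dataset(repo, name=bbh_configs[0], split="latest")
print(data)
```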
## Latest results
These are the [latest results from run 2024-11-19T23-58-30.543865](https://huggingface.co/datasets/open-llm-leaderboard/nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT-details/blob/main/nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT/results_2024-11-19T23-58-30.543865.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the "results" configuration and in the "latest" split of each eval):
```python
{
"all": {
"leaderboard": {
"exact_match,none": 0.024924471299093656,
"exact_match_stderr,none": 0.004246719185859959,
"prompt_level_loose_acc,none": 0.5545286506469501,
"prompt_level_loose_acc_stderr,none": 0.02138823777906319,
"acc,none": 0.2728557180851064,
"acc_stderr,none": 0.004060934297107003,
"inst_level_loose_acc,none": 0.6678657074340527,
"inst_level_loose_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.5138632162661737,
"prompt_level_strict_acc_stderr,none": 0.0215083020678561,
"inst_level_strict_acc,none": 0.6294964028776978,
"inst_level_strict_acc_stderr,none": "N/A",
"acc_norm,none": 0.3877286288753405,
"acc_norm_stderr,none": 0.0052581971394649075,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.40704738760631837,
"acc_norm_stderr,none": 0.0060823370750310475,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.784,
"acc_norm_stderr,none": 0.02607865766373279
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5721925133689839,
"acc_norm_stderr,none": 0.03627762136497335
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.304,
"acc_norm_stderr,none": 0.02915021337415965
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.48,
"acc_norm_stderr,none": 0.03166085340849512
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.312,
"acc_norm_stderr,none": 0.02936106757521985
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.576,
"acc_norm_stderr,none": 0.03131803437491622
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.236,
"acc_norm_stderr,none": 0.026909337594953852
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.22,
"acc_norm_stderr,none": 0.026251792824605793
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.428,
"acc_norm_stderr,none": 0.031355968923772626
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.452,
"acc_norm_stderr,none": 0.03153986449255664
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.46,
"acc_norm_stderr,none": 0.031584653891499004
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.388,
"acc_norm_stderr,none": 0.030881038748993974
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.2876712328767123,
"acc_norm_stderr,none": 0.037592781281728736
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.328,
"acc_norm_stderr,none": 0.029752391824475363
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.32,
"acc_norm_stderr,none": 0.029561724955240978
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.48314606741573035,
"acc_norm_stderr,none": 0.037560944447344834
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.832,
"acc_norm_stderr,none": 0.023692813205492536
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.32,
"acc_norm_stderr,none": 0.029561724955240978
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.184,
"acc_norm_stderr,none": 0.02455581299422255
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.12,
"acc_norm_stderr,none": 0.020593600596839998
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.268,
"acc_norm_stderr,none": 0.02806876238252672
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.492,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2835570469798658,
"acc_norm_stderr,none": 0.013061700919420478,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2727272727272727,
"acc_norm_stderr,none": 0.03173071239071728
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.3021978021978022,
"acc_norm_stderr,none": 0.019670416969439074
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.265625,
"acc_norm_stderr,none": 0.02089005840079951
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.5138632162661737,
"prompt_level_strict_acc_stderr,none": 0.0215083020678561,
"inst_level_strict_acc,none": 0.6294964028776978,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.5545286506469501,
"prompt_level_loose_acc_stderr,none": 0.02138823777906319,
"inst_level_loose_acc,none": 0.6678657074340527,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.024924471299093656,
"exact_match_stderr,none": 0.004246719185859959,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.03908794788273615,
"exact_match_stderr,none": 0.01107905050865708
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.016260162601626018,
"exact_match_stderr,none": 0.011450452676925654
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.007575757575757576,
"exact_match_stderr,none": 0.007575757575757577
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.007142857142857143,
"exact_match_stderr,none": 0.005041703051390571
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.006493506493506494,
"exact_match_stderr,none": 0.006493506493506494
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.07253886010362694,
"exact_match_stderr,none": 0.018718998520678213
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.007407407407407408,
"exact_match_stderr,none": 0.007407407407407408
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.2728557180851064,
"acc_stderr,none": 0.004060934297107003
},
"leaderboard_musr": {
"acc_norm,none": 0.40476190476190477,
"acc_norm_stderr,none": 0.01739317377016208,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.536,
"acc_norm_stderr,none": 0.031603975145223735
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.2578125,
"acc_norm_stderr,none": 0.027392944192695272
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.424,
"acc_norm_stderr,none": 0.03131803437491622
}
},
"leaderboard": {
"exact_match,none": 0.024924471299093656,
"exact_match_stderr,none": 0.004246719185859959,
"prompt_level_loose_acc,none": 0.5545286506469501,
"prompt_level_loose_acc_stderr,none": 0.02138823777906319,
"acc,none": 0.2728557180851064,
"acc_stderr,none": 0.004060934297107003,
"inst_level_loose_acc,none": 0.6678657074340527,
"inst_level_loose_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.5138632162661737,
"prompt_level_strict_acc_stderr,none": 0.0215083020678561,
"inst_level_strict_acc,none": 0.6294964028776978,
"inst_level_strict_acc_stderr,none": "N/A",
"acc_norm,none": 0.3877286288753405,
"acc_norm_stderr,none": 0.0052581971394649075,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.40704738760631837,
"acc_norm_stderr,none": 0.0060823370750310475,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.784,
"acc_norm_stderr,none": 0.02607865766373279
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5721925133689839,
"acc_norm_stderr,none": 0.03627762136497335
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.304,
"acc_norm_stderr,none": 0.02915021337415965
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.48,
"acc_norm_stderr,none": 0.03166085340849512
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.312,
"acc_norm_stderr,none": 0.02936106757521985
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.576,
"acc_norm_stderr,none": 0.03131803437491622
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.236,
"acc_norm_stderr,none": 0.026909337594953852
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.22,
"acc_norm_stderr,none": 0.026251792824605793
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.428,
"acc_norm_stderr,none": 0.031355968923772626
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.452,
"acc_norm_stderr,none": 0.03153986449255664
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.46,
"acc_norm_stderr,none": 0.031584653891499004
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.388,
"acc_norm_stderr,none": 0.030881038748993974
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.2876712328767123,
"acc_norm_stderr,none": 0.037592781281728736
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.328,
"acc_norm_stderr,none": 0.029752391824475363
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.32,
"acc_norm_stderr,none": 0.029561724955240978
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.48314606741573035,
"acc_norm_stderr,none": 0.037560944447344834
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.832,
"acc_norm_stderr,none": 0.023692813205492536
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.32,
"acc_norm_stderr,none": 0.029561724955240978
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.184,
"acc_norm_stderr,none": 0.02455581299422255
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.12,
"acc_norm_stderr,none": 0.020593600596839998
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.268,
"acc_norm_stderr,none": 0.02806876238252672
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.492,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2835570469798658,
"acc_norm_stderr,none": 0.013061700919420478,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2727272727272727,
"acc_norm_stderr,none": 0.03173071239071728
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.3021978021978022,
"acc_norm_stderr,none": 0.019670416969439074
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.265625,
"acc_norm_stderr,none": 0.02089005840079951
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.5138632162661737,
"prompt_level_strict_acc_stderr,none": 0.0215083020678561,
"inst_level_strict_acc,none": 0.6294964028776978,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.5545286506469501,
"prompt_level_loose_acc_stderr,none": 0.02138823777906319,
"inst_level_loose_acc,none": 0.6678657074340527,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.024924471299093656,
"exact_match_stderr,none": 0.004246719185859959,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.03908794788273615,
"exact_match_stderr,none": 0.01107905050865708
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.016260162601626018,
"exact_match_stderr,none": 0.011450452676925654
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.007575757575757576,
"exact_match_stderr,none": 0.007575757575757577
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.007142857142857143,
"exact_match_stderr,none": 0.005041703051390571
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.006493506493506494,
"exact_match_stderr,none": 0.006493506493506494
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.07253886010362694,
"exact_match_stderr,none": 0.018718998520678213
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.007407407407407408,
"exact_match_stderr,none": 0.007407407407407408
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.2728557180851064,
"acc_stderr,none": 0.004060934297107003
},
"leaderboard_musr": {
"acc_norm,none": 0.40476190476190477,
"acc_norm_stderr,none": 0.01739317377016208,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.536,
"acc_norm_stderr,none": 0.031603975145223735
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.2578125,
"acc_norm_stderr,none": 0.027392944192695272
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.424,
"acc_norm_stderr,none": 0.03131803437491622
}
}
```
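The same figures can also be pulled directly from the aggregated results file linked in the "Latest results" section rather than copied from the dump above. A hedged sketch using `huggingface_hub` (the repository id and file name are taken from that link; inspect the top-level keys before drilling in, since the raw file may wrap the metrics in additional fields):

```python
import json

from huggingface_hub import hf_hub_download

# Download the aggregated results JSON referenced in "Latest results".
path = hf_hub_download(
    repo_id="open-llm-leaderboard/nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT-details",
    filename="nbeerbower__Mistral-Gutenberg-Doppel-7B-FFT/results_2024-11-19T23-58-30.543865.json",
    repo_type="dataset",
)

with open(path) as f:
    results = json.load(f)

# Inspect the structure first; the metrics printed above should appear
# under one of these top-level keys.
print(list(results.keys()))
```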
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases, and limitations of the dataset. More information is needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
eliasfiz/rlhf-raw | eliasfiz | "2024-11-19T23:59:39Z" | 9 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-19T23:59:33Z" | ---
dataset_info:
features:
- name: audioId
dtype: string
- name: audio_bits
dtype: audio
- name: rank
dtype: int64
- name: batchId
dtype: string
- name: prompt
dtype: string
- name: emotion
dtype: string
splits:
- name: train
num_bytes: 96468490.0
num_examples: 750
download_size: 87971561
dataset_size: 96468490.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
open-llm-leaderboard/vonjack__Qwen2.5-Coder-0.5B-Merged-details | open-llm-leaderboard | "2024-11-20T00:09:33Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T00:06:18Z" | ---
pretty_name: Evaluation run of vonjack/Qwen2.5-Coder-0.5B-Merged
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [vonjack/Qwen2.5-Coder-0.5B-Merged](https://huggingface.co/vonjack/Qwen2.5-Coder-0.5B-Merged)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/vonjack__Qwen2.5-Coder-0.5B-Merged-details\"\
,\n\tname=\"vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-11-20T00-06-17.841413](https://huggingface.co/datasets/open-llm-leaderboard/vonjack__Qwen2.5-Coder-0.5B-Merged-details/blob/main/vonjack__Qwen2.5-Coder-0.5B-Merged/results_2024-11-20T00-06-17.841413.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0,\n \"inst_level_strict_acc,none\": 0.37410071942446044,\n \
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"acc_norm,none\"\
: 0.2987417304449345,\n \"acc_norm_stderr,none\": 0.0050356192908862585,\n\
\ \"acc,none\": 0.12017952127659574,\n \"acc_stderr,none\"\
: 0.0029645679194853104,\n \"inst_level_loose_acc,none\": 0.38489208633093525,\n\
\ \"inst_level_loose_acc_stderr,none\": \"N/A\",\n \"prompt_level_strict_acc,none\"\
: 0.24584103512014788,\n \"prompt_level_strict_acc_stderr,none\": 0.01852941708079555,\n\
\ \"prompt_level_loose_acc,none\": 0.2532347504621072,\n \"\
prompt_level_loose_acc_stderr,none\": 0.018713577543655574,\n \"alias\"\
: \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \"acc_norm,none\"\
: 0.3041138691199445,\n \"acc_norm_stderr,none\": 0.00582741441813678,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \
\ \"acc_norm,none\": 0.472,\n \"acc_norm_stderr,none\": 0.031636489531544396\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\"\
: \" - leaderboard_bbh_causal_judgement\",\n \"acc_norm,none\": 0.5187165775401069,\n\
\ \"acc_norm_stderr,none\": 0.03663608375537843\n },\n \
\ \"leaderboard_bbh_date_understanding\": {\n \"alias\": \" - leaderboard_bbh_date_understanding\"\
,\n \"acc_norm,none\": 0.216,\n \"acc_norm_stderr,none\":\
\ 0.02607865766373279\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\",\n \
\ \"acc_norm,none\": 0.304,\n \"acc_norm_stderr,none\": 0.02915021337415965\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\"\
: \" - leaderboard_bbh_formal_fallacies\",\n \"acc_norm,none\": 0.468,\n\
\ \"acc_norm_stderr,none\": 0.03162125257572558\n },\n \
\ \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.284,\n \"acc_norm_stderr,none\":\
\ 0.02857695873043744\n },\n \"leaderboard_bbh_hyperbaton\": {\n \
\ \"alias\": \" - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\"\
: 0.448,\n \"acc_norm_stderr,none\": 0.03151438761115349\n },\n\
\ \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.188,\n \"acc_norm_stderr,none\": 0.024760377727750513\n },\n\
\ \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\",\n \"\
acc_norm,none\": 0.152,\n \"acc_norm_stderr,none\": 0.022752024491765464\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n\
\ \"acc_norm,none\": 0.32,\n \"acc_norm_stderr,none\": 0.029561724955240978\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"acc_norm,none\"\
: 0.316,\n \"acc_norm_stderr,none\": 0.029462657598578648\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.42,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\"\
: \" - leaderboard_bbh_object_counting\",\n \"acc_norm,none\": 0.236,\n\
\ \"acc_norm_stderr,none\": 0.026909337594953852\n },\n \
\ \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\
,\n \"acc_norm,none\": 0.23972602739726026,\n \"acc_norm_stderr,none\"\
: 0.035453462375110385\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.152,\n \"acc_norm_stderr,none\":\
\ 0.022752024491765464\n },\n \"leaderboard_bbh_ruin_names\": {\n\
\ \"alias\": \" - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\"\
: 0.172,\n \"acc_norm_stderr,none\": 0.02391551394448624\n },\n\
\ \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.156,\n \"acc_norm_stderr,none\": 0.022995023034068682\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" -\
\ leaderboard_bbh_snarks\",\n \"acc_norm,none\": 0.5,\n \"\
acc_norm_stderr,none\": 0.03758230140014144\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \
\ \"acc_norm,none\": 0.5,\n \"acc_norm_stderr,none\": 0.031686212526223896\n\
\ },\n \"leaderboard_bbh_temporal_sequences\": {\n \"alias\"\
: \" - leaderboard_bbh_temporal_sequences\",\n \"acc_norm,none\": 0.2,\n\
\ \"acc_norm_stderr,none\": 0.02534897002097912\n },\n \
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \"\
alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\",\n \
\ \"acc_norm,none\": 0.18,\n \"acc_norm_stderr,none\": 0.02434689065029351\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.152,\n \"acc_norm_stderr,none\":\
\ 0.022752024491765464\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.3,\n \"acc_norm_stderr,none\": 0.029040893477575783\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\":\
\ \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\": 0.488,\n\
\ \"acc_norm_stderr,none\": 0.03167708558254714\n },\n \
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2533557046979866,\n\
\ \"acc_norm_stderr,none\": 0.012579095773296255,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.22727272727272727,\n \"acc_norm_stderr,none\": 0.029857515673386438\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.22893772893772893,\n\
\ \"acc_norm_stderr,none\": 0.01799720901943154\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.29464285714285715,\n \"acc_norm_stderr,none\"\
: 0.021562481080109767\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.24584103512014788,\n \"prompt_level_strict_acc_stderr,none\": 0.01852941708079555,\n\
\ \"inst_level_strict_acc,none\": 0.37410071942446044,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.2532347504621072,\n \"prompt_level_loose_acc_stderr,none\": 0.018713577543655574,\n\
\ \"inst_level_loose_acc,none\": 0.38489208633093525,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0,\n \"alias\": \" - leaderboard_math_hard\"\n },\n \
\ \"leaderboard_math_algebra_hard\": {\n \"alias\": \" - leaderboard_math_algebra_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0\n },\n \"leaderboard_math_counting_and_prob_hard\": {\n \
\ \"alias\": \" - leaderboard_math_counting_and_prob_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n \
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\"\
: \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\": 0.0,\n\
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n\
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_prealgebra_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_precalculus_hard\": {\n \"alias\"\
: \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\":\
\ 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_mmlu_pro\"\
: {\n \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\"\
: 0.12017952127659574,\n \"acc_stderr,none\": 0.0029645679194853104\n\
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.32936507936507936,\n\
\ \"acc_norm_stderr,none\": 0.016472516580039485,\n \"alias\"\
: \" - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\"\
: {\n \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \
\ \"acc_norm,none\": 0.508,\n \"acc_norm_stderr,none\": 0.03168215643141386\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\"\
: \" - leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.20703125,\n\
\ \"acc_norm_stderr,none\": 0.025373238296688486\n },\n \
\ \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.276,\n \"acc_norm_stderr,none\":\
\ 0.02832853727421142\n }\n },\n \"leaderboard\": {\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0,\n \"inst_level_strict_acc,none\"\
: 0.37410071942446044,\n \"inst_level_strict_acc_stderr,none\": \"N/A\",\n\
\ \"acc_norm,none\": 0.2987417304449345,\n \"acc_norm_stderr,none\"\
: 0.0050356192908862585,\n \"acc,none\": 0.12017952127659574,\n \"\
acc_stderr,none\": 0.0029645679194853104,\n \"inst_level_loose_acc,none\"\
: 0.38489208633093525,\n \"inst_level_loose_acc_stderr,none\": \"N/A\",\n\
\ \"prompt_level_strict_acc,none\": 0.24584103512014788,\n \"prompt_level_strict_acc_stderr,none\"\
: 0.01852941708079555,\n \"prompt_level_loose_acc,none\": 0.2532347504621072,\n\
\ \"prompt_level_loose_acc_stderr,none\": 0.018713577543655574,\n \
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \"acc_norm,none\"\
: 0.3041138691199445,\n \"acc_norm_stderr,none\": 0.00582741441813678,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \"\
acc_norm,none\": 0.472,\n \"acc_norm_stderr,none\": 0.031636489531544396\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.5187165775401069,\n \"acc_norm_stderr,none\"\
: 0.03663608375537843\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.216,\n \"acc_norm_stderr,none\": 0.02607865766373279\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.304,\n \"acc_norm_stderr,none\": 0.02915021337415965\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.468,\n \"acc_norm_stderr,none\": 0.03162125257572558\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.284,\n \"acc_norm_stderr,none\": 0.02857695873043744\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.448,\n \"acc_norm_stderr,none\": 0.03151438761115349\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.188,\n \"acc_norm_stderr,none\": 0.024760377727750513\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.152,\n \"acc_norm_stderr,none\": 0.022752024491765464\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.32,\n \"acc_norm_stderr,none\": 0.029561724955240978\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.316,\n \"acc_norm_stderr,none\": 0.029462657598578648\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.42,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.236,\n \"acc_norm_stderr,none\": 0.026909337594953852\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.23972602739726026,\n\
\ \"acc_norm_stderr,none\": 0.035453462375110385\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.152,\n \"acc_norm_stderr,none\": 0.022752024491765464\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.172,\n \"acc_norm_stderr,none\": 0.02391551394448624\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.156,\n \"acc_norm_stderr,none\": 0.022995023034068682\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.5,\n \"acc_norm_stderr,none\": 0.03758230140014144\n\
\ },\n \"leaderboard_bbh_sports_understanding\": {\n \"alias\": \"\
\ - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\": 0.5,\n\
\ \"acc_norm_stderr,none\": 0.031686212526223896\n },\n \"leaderboard_bbh_temporal_sequences\"\
: {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\",\n \"\
acc_norm,none\": 0.2,\n \"acc_norm_stderr,none\": 0.02534897002097912\n \
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\",\n\
\ \"acc_norm,none\": 0.18,\n \"acc_norm_stderr,none\": 0.02434689065029351\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.152,\n \"acc_norm_stderr,none\": 0.022752024491765464\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.3,\n \"acc_norm_stderr,none\": 0.029040893477575783\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2533557046979866,\n\
\ \"acc_norm_stderr,none\": 0.012579095773296255,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.22727272727272727,\n\
\ \"acc_norm_stderr,none\": 0.029857515673386438\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.22893772893772893,\n \"acc_norm_stderr,none\": 0.01799720901943154\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.29464285714285715,\n \"acc_norm_stderr,none\"\
: 0.021562481080109767\n },\n \"leaderboard_ifeval\": {\n \"alias\"\
: \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.24584103512014788,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.01852941708079555,\n \
\ \"inst_level_strict_acc,none\": 0.37410071942446044,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.2532347504621072,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.018713577543655574,\n \"inst_level_loose_acc,none\"\
: 0.38489208633093525,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n\
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.0,\n\
\ \"exact_match_stderr,none\": 0.0,\n \"alias\": \" - leaderboard_math_hard\"\
\n },\n \"leaderboard_math_algebra_hard\": {\n \"alias\": \" - leaderboard_math_algebra_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_geometry_hard\"\
: {\n \"alias\": \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n \
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\": \" - leaderboard_math_num_theory_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_precalculus_hard\": {\n \"alias\": \" -\
\ leaderboard_math_precalculus_hard\",\n \"exact_match,none\": 0.0,\n \
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_mmlu_pro\": {\n\
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.12017952127659574,\n\
\ \"acc_stderr,none\": 0.0029645679194853104\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.32936507936507936,\n \"acc_norm_stderr,none\"\
: 0.016472516580039485,\n \"alias\": \" - leaderboard_musr\"\n },\n \
\ \"leaderboard_musr_murder_mysteries\": {\n \"alias\": \" - leaderboard_musr_murder_mysteries\"\
,\n \"acc_norm,none\": 0.508,\n \"acc_norm_stderr,none\": 0.03168215643141386\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\": \" -\
\ leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.20703125,\n\
\ \"acc_norm_stderr,none\": 0.025373238296688486\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \"acc_norm,none\"\
: 0.276,\n \"acc_norm_stderr,none\": 0.02832853727421142\n }\n}\n```"
repo_url: https://huggingface.co/vonjack/Qwen2.5-Coder-0.5B-Merged
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_bbh_navigate
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_bbh_snarks
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_gpqa_extended
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_gpqa_main
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_ifeval
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_ifeval_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_mmlu_pro
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_musr_object_placements
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T00-06-17.841413.jsonl'
- config_name: vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_20T00_06_17.841413
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T00-06-17.841413.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T00-06-17.841413.jsonl'
---
# Dataset Card for Evaluation run of vonjack/Qwen2.5-Coder-0.5B-Merged
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [vonjack/Qwen2.5-Coder-0.5B-Merged](https://huggingface.co/vonjack/Qwen2.5-Coder-0.5B-Merged)
The dataset is composed of 38 configuration(s), each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/vonjack__Qwen2.5-Coder-0.5B-Merged-details",
name="vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
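If you want to see which per-task configurations and run splits exist before loading anything, the `datasets` library also provides listing helpers. The snippet below is a minimal sketch (it assumes network access to the Hub; the configuration name passed to the split listing is the IFEval configuration declared in this card):
```python
from datasets import get_dataset_config_names, get_dataset_split_names

repo = "open-llm-leaderboard/vonjack__Qwen2.5-Coder-0.5B-Merged-details"

# Enumerate the per-task configurations declared in this dataset's card.
configs = get_dataset_config_names(repo)
print(len(configs), "configurations")

# Each configuration has one split per run timestamp plus a "latest" alias.
print(get_dataset_split_names(
    repo,
    config_name="vonjack__Qwen2.5-Coder-0.5B-Merged__leaderboard_ifeval",
))
```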
## Latest results
These are the [latest results from run 2024-11-20T00-06-17.841413](https://huggingface.co/datasets/open-llm-leaderboard/vonjack__Qwen2.5-Coder-0.5B-Merged-details/blob/main/vonjack__Qwen2.5-Coder-0.5B-Merged/results_2024-11-20T00-06-17.841413.json) (note that the repository may contain results for other tasks if successive evaluations did not cover the same tasks; each is available in the results files and in the "latest" split of the corresponding configuration):
```python
{
"all": {
"leaderboard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"inst_level_strict_acc,none": 0.37410071942446044,
"inst_level_strict_acc_stderr,none": "N/A",
"acc_norm,none": 0.2987417304449345,
"acc_norm_stderr,none": 0.0050356192908862585,
"acc,none": 0.12017952127659574,
"acc_stderr,none": 0.0029645679194853104,
"inst_level_loose_acc,none": 0.38489208633093525,
"inst_level_loose_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.24584103512014788,
"prompt_level_strict_acc_stderr,none": 0.01852941708079555,
"prompt_level_loose_acc,none": 0.2532347504621072,
"prompt_level_loose_acc_stderr,none": 0.018713577543655574,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.3041138691199445,
"acc_norm_stderr,none": 0.00582741441813678,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.472,
"acc_norm_stderr,none": 0.031636489531544396
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5187165775401069,
"acc_norm_stderr,none": 0.03663608375537843
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.216,
"acc_norm_stderr,none": 0.02607865766373279
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.304,
"acc_norm_stderr,none": 0.02915021337415965
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.468,
"acc_norm_stderr,none": 0.03162125257572558
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.284,
"acc_norm_stderr,none": 0.02857695873043744
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.448,
"acc_norm_stderr,none": 0.03151438761115349
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.188,
"acc_norm_stderr,none": 0.024760377727750513
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.152,
"acc_norm_stderr,none": 0.022752024491765464
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.32,
"acc_norm_stderr,none": 0.029561724955240978
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.029462657598578648
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.236,
"acc_norm_stderr,none": 0.026909337594953852
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.23972602739726026,
"acc_norm_stderr,none": 0.035453462375110385
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.152,
"acc_norm_stderr,none": 0.022752024491765464
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.172,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.156,
"acc_norm_stderr,none": 0.022995023034068682
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.5,
"acc_norm_stderr,none": 0.03758230140014144
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.5,
"acc_norm_stderr,none": 0.031686212526223896
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.2,
"acc_norm_stderr,none": 0.02534897002097912
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.18,
"acc_norm_stderr,none": 0.02434689065029351
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.152,
"acc_norm_stderr,none": 0.022752024491765464
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.3,
"acc_norm_stderr,none": 0.029040893477575783
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2533557046979866,
"acc_norm_stderr,none": 0.012579095773296255,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.22727272727272727,
"acc_norm_stderr,none": 0.029857515673386438
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.22893772893772893,
"acc_norm_stderr,none": 0.01799720901943154
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.29464285714285715,
"acc_norm_stderr,none": 0.021562481080109767
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.24584103512014788,
"prompt_level_strict_acc_stderr,none": 0.01852941708079555,
"inst_level_strict_acc,none": 0.37410071942446044,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.2532347504621072,
"prompt_level_loose_acc_stderr,none": 0.018713577543655574,
"inst_level_loose_acc,none": 0.38489208633093525,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.12017952127659574,
"acc_stderr,none": 0.0029645679194853104
},
"leaderboard_musr": {
"acc_norm,none": 0.32936507936507936,
"acc_norm_stderr,none": 0.016472516580039485,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.20703125,
"acc_norm_stderr,none": 0.025373238296688486
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.276,
"acc_norm_stderr,none": 0.02832853727421142
}
},
"leaderboard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"inst_level_strict_acc,none": 0.37410071942446044,
"inst_level_strict_acc_stderr,none": "N/A",
"acc_norm,none": 0.2987417304449345,
"acc_norm_stderr,none": 0.0050356192908862585,
"acc,none": 0.12017952127659574,
"acc_stderr,none": 0.0029645679194853104,
"inst_level_loose_acc,none": 0.38489208633093525,
"inst_level_loose_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.24584103512014788,
"prompt_level_strict_acc_stderr,none": 0.01852941708079555,
"prompt_level_loose_acc,none": 0.2532347504621072,
"prompt_level_loose_acc_stderr,none": 0.018713577543655574,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.3041138691199445,
"acc_norm_stderr,none": 0.00582741441813678,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.472,
"acc_norm_stderr,none": 0.031636489531544396
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5187165775401069,
"acc_norm_stderr,none": 0.03663608375537843
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.216,
"acc_norm_stderr,none": 0.02607865766373279
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.304,
"acc_norm_stderr,none": 0.02915021337415965
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.468,
"acc_norm_stderr,none": 0.03162125257572558
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.284,
"acc_norm_stderr,none": 0.02857695873043744
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.448,
"acc_norm_stderr,none": 0.03151438761115349
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.188,
"acc_norm_stderr,none": 0.024760377727750513
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.152,
"acc_norm_stderr,none": 0.022752024491765464
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.32,
"acc_norm_stderr,none": 0.029561724955240978
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.029462657598578648
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.236,
"acc_norm_stderr,none": 0.026909337594953852
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.23972602739726026,
"acc_norm_stderr,none": 0.035453462375110385
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.152,
"acc_norm_stderr,none": 0.022752024491765464
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.172,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.156,
"acc_norm_stderr,none": 0.022995023034068682
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.5,
"acc_norm_stderr,none": 0.03758230140014144
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.5,
"acc_norm_stderr,none": 0.031686212526223896
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.2,
"acc_norm_stderr,none": 0.02534897002097912
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.18,
"acc_norm_stderr,none": 0.02434689065029351
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.152,
"acc_norm_stderr,none": 0.022752024491765464
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.3,
"acc_norm_stderr,none": 0.029040893477575783
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2533557046979866,
"acc_norm_stderr,none": 0.012579095773296255,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.22727272727272727,
"acc_norm_stderr,none": 0.029857515673386438
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.22893772893772893,
"acc_norm_stderr,none": 0.01799720901943154
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.29464285714285715,
"acc_norm_stderr,none": 0.021562481080109767
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.24584103512014788,
"prompt_level_strict_acc_stderr,none": 0.01852941708079555,
"inst_level_strict_acc,none": 0.37410071942446044,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.2532347504621072,
"prompt_level_loose_acc_stderr,none": 0.018713577543655574,
"inst_level_loose_acc,none": 0.38489208633093525,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.12017952127659574,
"acc_stderr,none": 0.0029645679194853104
},
"leaderboard_musr": {
"acc_norm,none": 0.32936507936507936,
"acc_norm_stderr,none": 0.016472516580039485,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.20703125,
"acc_norm_stderr,none": 0.025373238296688486
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.276,
"acc_norm_stderr,none": 0.02832853727421142
}
}
```
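The raw results file linked above can also be fetched directly, without going through `load_dataset`. This is a sketch using `huggingface_hub` with the exact repository and file name from the "Latest results" link; the top-level structure of the payload is not assumed here, so its keys are simply printed:
```python
import json
from huggingface_hub import hf_hub_download

# Download the aggregated results JSON referenced in the "Latest results" link.
path = hf_hub_download(
    repo_id="open-llm-leaderboard/vonjack__Qwen2.5-Coder-0.5B-Merged-details",
    filename="vonjack__Qwen2.5-Coder-0.5B-Merged/results_2024-11-20T00-06-17.841413.json",
    repo_type="dataset",
)

with open(path) as f:
    results = json.load(f)

print(sorted(results))  # inspect the top-level keys of the results payload
```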
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases, and limitations of the dataset. More information is needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/BlackBeenie__Bloslain-8B-v0.2-details | open-llm-leaderboard | "2024-11-20T00:17:45Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T00:14:48Z" | ---
pretty_name: Evaluation run of BlackBeenie/Bloslain-8B-v0.2
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [BlackBeenie/Bloslain-8B-v0.2](https://huggingface.co/BlackBeenie/Bloslain-8B-v0.2)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/BlackBeenie__Bloslain-8B-v0.2-details\"\
,\n\tname=\"BlackBeenie__Bloslain-8B-v0.2__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-11-20T00-14-46.384603](https://huggingface.co/datasets/open-llm-leaderboard/BlackBeenie__Bloslain-8B-v0.2-details/blob/main/BlackBeenie__Bloslain-8B-v0.2/results_2024-11-20T00-14-46.384603.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"acc,none\": 0.3653590425531915,\n \"acc_stderr,none\"\
: 0.00439008688059826,\n \"inst_level_loose_acc,none\": 0.6247002398081535,\n\
\ \"inst_level_loose_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.5157116451016636,\n \"prompt_level_loose_acc_stderr,none\": 0.021505948540061572,\n\
\ \"prompt_level_strict_acc,none\": 0.4399260628465804,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.021360708220801928,\n \"\
acc_norm,none\": 0.46750551303671034,\n \"acc_norm_stderr,none\": 0.0053551995731588,\n\
\ \"exact_match,none\": 0.14501510574018128,\n \"exact_match_stderr,none\"\
: 0.008997105714006057,\n \"inst_level_strict_acc,none\": 0.564748201438849,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"alias\"\
: \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \"acc_norm,none\"\
: 0.5087658392640166,\n \"acc_norm_stderr,none\": 0.006197730311334722,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \
\ \"acc_norm,none\": 0.832,\n \"acc_norm_stderr,none\": 0.023692813205492536\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\"\
: \" - leaderboard_bbh_causal_judgement\",\n \"acc_norm,none\": 0.6149732620320856,\n\
\ \"acc_norm_stderr,none\": 0.03567936280544673\n },\n \
\ \"leaderboard_bbh_date_understanding\": {\n \"alias\": \" - leaderboard_bbh_date_understanding\"\
,\n \"acc_norm,none\": 0.464,\n \"acc_norm_stderr,none\":\
\ 0.03160397514522374\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\",\n \
\ \"acc_norm,none\": 0.404,\n \"acc_norm_stderr,none\": 0.03109668818482536\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\"\
: \" - leaderboard_bbh_formal_fallacies\",\n \"acc_norm,none\": 0.564,\n\
\ \"acc_norm_stderr,none\": 0.03142556706028136\n },\n \
\ \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.412,\n \"acc_norm_stderr,none\":\
\ 0.03119159602602282\n },\n \"leaderboard_bbh_hyperbaton\": {\n \
\ \"alias\": \" - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\"\
: 0.716,\n \"acc_norm_stderr,none\": 0.028576958730437443\n },\n\
\ \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.396,\n \"acc_norm_stderr,none\": 0.030993197854577898\n },\n\
\ \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\",\n \"\
acc_norm,none\": 0.392,\n \"acc_norm_stderr,none\": 0.030938207620401222\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n\
\ \"acc_norm,none\": 0.672,\n \"acc_norm_stderr,none\": 0.029752391824475363\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"acc_norm,none\"\
: 0.696,\n \"acc_norm_stderr,none\": 0.029150213374159652\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.652,\n \"acc_norm_stderr,none\":\
\ 0.030186568464511673\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"alias\": \" - leaderboard_bbh_object_counting\",\n \
\ \"acc_norm,none\": 0.444,\n \"acc_norm_stderr,none\": 0.03148684942554571\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
alias\": \" - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\"\
: 0.4794520547945205,\n \"acc_norm_stderr,none\": 0.041487661809251744\n\
\ },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\",\n\
\ \"acc_norm,none\": 0.564,\n \"acc_norm_stderr,none\": 0.03142556706028136\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\": 0.648,\n \
\ \"acc_norm_stderr,none\": 0.030266288057359866\n },\n \"\
leaderboard_bbh_salient_translation_error_detection\": {\n \"alias\"\
: \" - leaderboard_bbh_salient_translation_error_detection\",\n \"acc_norm,none\"\
: 0.476,\n \"acc_norm_stderr,none\": 0.03164968895968774\n },\n\
\ \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.651685393258427,\n \"acc_norm_stderr,none\"\
: 0.035811144737534356\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \
\ \"acc_norm,none\": 0.704,\n \"acc_norm_stderr,none\": 0.028928939388379697\n\
\ },\n \"leaderboard_bbh_temporal_sequences\": {\n \"alias\"\
: \" - leaderboard_bbh_temporal_sequences\",\n \"acc_norm,none\": 0.24,\n\
\ \"acc_norm_stderr,none\": 0.027065293652238982\n },\n \
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \"\
alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\",\n \
\ \"acc_norm,none\": 0.196,\n \"acc_norm_stderr,none\": 0.025156857313255926\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.192,\n \"acc_norm_stderr,none\":\
\ 0.024960691989171963\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.312,\n \"acc_norm_stderr,none\":\
\ 0.02936106757521985\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.544,\n \"acc_norm_stderr,none\": 0.031563285061213475\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.3062080536912752,\n\
\ \"acc_norm_stderr,none\": 0.013366495210573544,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.30808080808080807,\n \"acc_norm_stderr,none\": 0.03289477330098615\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.3021978021978022,\n\
\ \"acc_norm_stderr,none\": 0.019670416969439074\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.31026785714285715,\n \"acc_norm_stderr,none\"\
: 0.021880380205954585\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.4399260628465804,\n \"prompt_level_strict_acc_stderr,none\": 0.021360708220801928,\n\
\ \"inst_level_strict_acc,none\": 0.564748201438849,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.5157116451016636,\n \"prompt_level_loose_acc_stderr,none\": 0.021505948540061572,\n\
\ \"inst_level_loose_acc,none\": 0.6247002398081535,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.14501510574018128,\n \"exact_match_stderr,none\"\
: 0.008997105714006057,\n \"alias\": \" - leaderboard_math_hard\"\n \
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.30618892508143325,\n\
\ \"exact_match_stderr,none\": 0.026348423774891612\n },\n \
\ \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\": \"\
\ - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.07317073170731707,\n \"exact_match_stderr,none\": 0.023577005978097667\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\"\
: \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\": 0.030303030303030304,\n\
\ \"exact_match_stderr,none\": 0.014977019714308254\n },\n \
\ \"leaderboard_math_intermediate_algebra_hard\": {\n \"alias\":\
\ \" - leaderboard_math_intermediate_algebra_hard\",\n \"exact_match,none\"\
: 0.017857142857142856,\n \"exact_match_stderr,none\": 0.007928503387888855\n\
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\"\
: \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\": 0.08441558441558442,\n\
\ \"exact_match_stderr,none\": 0.022475781231866967\n },\n \
\ \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.32124352331606215,\n \"exact_match_stderr,none\"\
: 0.03369950868549069\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.037037037037037035,\n \"exact_match_stderr,none\"\
: 0.016314377626726044\n },\n \"leaderboard_mmlu_pro\": {\n \
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.3653590425531915,\n\
\ \"acc_stderr,none\": 0.00439008688059826\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.4074074074074074,\n \"acc_norm_stderr,none\"\
: 0.01752871103086407,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"alias\": \"\
\ - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\": 0.54,\n\
\ \"acc_norm_stderr,none\": 0.031584653891499004\n },\n \
\ \"leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.38671875,\n \"acc_norm_stderr,none\"\
: 0.030497017430410063\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \
\ \"acc_norm,none\": 0.296,\n \"acc_norm_stderr,none\": 0.028928939388379694\n\
\ }\n },\n \"leaderboard\": {\n \"acc,none\": 0.3653590425531915,\n\
\ \"acc_stderr,none\": 0.00439008688059826,\n \"inst_level_loose_acc,none\"\
: 0.6247002398081535,\n \"inst_level_loose_acc_stderr,none\": \"N/A\",\n\
\ \"prompt_level_loose_acc,none\": 0.5157116451016636,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.021505948540061572,\n \"prompt_level_strict_acc,none\": 0.4399260628465804,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.021360708220801928,\n \
\ \"acc_norm,none\": 0.46750551303671034,\n \"acc_norm_stderr,none\": 0.0053551995731588,\n\
\ \"exact_match,none\": 0.14501510574018128,\n \"exact_match_stderr,none\"\
: 0.008997105714006057,\n \"inst_level_strict_acc,none\": 0.564748201438849,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"alias\": \"\
leaderboard\"\n },\n \"leaderboard_bbh\": {\n \"acc_norm,none\": 0.5087658392640166,\n\
\ \"acc_norm_stderr,none\": 0.006197730311334722,\n \"alias\": \"\
\ - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\": {\n\
\ \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\"\
: 0.832,\n \"acc_norm_stderr,none\": 0.023692813205492536\n },\n \"\
leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.6149732620320856,\n \"acc_norm_stderr,none\"\
: 0.03567936280544673\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.464,\n \"acc_norm_stderr,none\": 0.03160397514522374\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.404,\n \"acc_norm_stderr,none\": 0.03109668818482536\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.564,\n \"acc_norm_stderr,none\": 0.03142556706028136\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.412,\n \"acc_norm_stderr,none\": 0.03119159602602282\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.716,\n \"acc_norm_stderr,none\": 0.028576958730437443\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.396,\n \"acc_norm_stderr,none\": 0.030993197854577898\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.392,\n \"acc_norm_stderr,none\": 0.030938207620401222\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.672,\n \"acc_norm_stderr,none\": 0.029752391824475363\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.696,\n \"acc_norm_stderr,none\": 0.029150213374159652\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.652,\n \"acc_norm_stderr,none\": 0.030186568464511673\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.444,\n \"acc_norm_stderr,none\": 0.03148684942554571\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.4794520547945205,\n\
\ \"acc_norm_stderr,none\": 0.041487661809251744\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.564,\n \"acc_norm_stderr,none\": 0.03142556706028136\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.648,\n \"acc_norm_stderr,none\": 0.030266288057359866\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.476,\n \"acc_norm_stderr,none\": 0.03164968895968774\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.651685393258427,\n \"acc_norm_stderr,none\"\
: 0.035811144737534356\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.704,\n \"acc_norm_stderr,none\": 0.028928939388379697\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.24,\n \"acc_norm_stderr,none\": 0.027065293652238982\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.196,\n \"acc_norm_stderr,none\": 0.025156857313255926\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.192,\n \"acc_norm_stderr,none\": 0.024960691989171963\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.312,\n \"acc_norm_stderr,none\": 0.02936106757521985\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.544,\n \"acc_norm_stderr,none\": 0.031563285061213475\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.3062080536912752,\n\
\ \"acc_norm_stderr,none\": 0.013366495210573544,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.30808080808080807,\n\
\ \"acc_norm_stderr,none\": 0.03289477330098615\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.3021978021978022,\n \"acc_norm_stderr,none\": 0.019670416969439074\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.31026785714285715,\n \"acc_norm_stderr,none\"\
: 0.021880380205954585\n },\n \"leaderboard_ifeval\": {\n \"alias\"\
: \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.4399260628465804,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.021360708220801928,\n \
\ \"inst_level_strict_acc,none\": 0.564748201438849,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.5157116451016636,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.021505948540061572,\n \"inst_level_loose_acc,none\"\
: 0.6247002398081535,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n \
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.14501510574018128,\n\
\ \"exact_match_stderr,none\": 0.008997105714006057,\n \"alias\":\
\ \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\": {\n\
\ \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.30618892508143325,\n \"exact_match_stderr,none\": 0.026348423774891612\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.07317073170731707,\n \"exact_match_stderr,none\": 0.023577005978097667\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\": \" - leaderboard_math_geometry_hard\"\
,\n \"exact_match,none\": 0.030303030303030304,\n \"exact_match_stderr,none\"\
: 0.014977019714308254\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.017857142857142856,\n \"exact_match_stderr,none\"\
: 0.007928503387888855\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.08441558441558442,\n \"exact_match_stderr,none\": 0.022475781231866967\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.32124352331606215,\n \"exact_match_stderr,none\"\
: 0.03369950868549069\n },\n \"leaderboard_math_precalculus_hard\": {\n \
\ \"alias\": \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\"\
: 0.037037037037037035,\n \"exact_match_stderr,none\": 0.016314377626726044\n\
\ },\n \"leaderboard_mmlu_pro\": {\n \"alias\": \" - leaderboard_mmlu_pro\"\
,\n \"acc,none\": 0.3653590425531915,\n \"acc_stderr,none\": 0.00439008688059826\n\
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.4074074074074074,\n\
\ \"acc_norm_stderr,none\": 0.01752871103086407,\n \"alias\": \" -\
\ leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\": {\n \
\ \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\"\
: 0.54,\n \"acc_norm_stderr,none\": 0.031584653891499004\n },\n \"\
leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.38671875,\n \"acc_norm_stderr,none\": 0.030497017430410063\n\
\ },\n \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.296,\n \"acc_norm_stderr,none\": 0.028928939388379694\n\
\ }\n}\n```"
repo_url: https://huggingface.co/BlackBeenie/Bloslain-8B-v0.2
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_bbh_navigate
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_bbh_snarks
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_gpqa_extended
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_gpqa_main
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_ifeval
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_ifeval_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_mmlu_pro
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_musr_object_placements
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T00-14-46.384603.jsonl'
- config_name: BlackBeenie__Bloslain-8B-v0.2__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_20T00_14_46.384603
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T00-14-46.384603.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T00-14-46.384603.jsonl'
---
# Dataset Card for Evaluation run of BlackBeenie/Bloslain-8B-v0.2
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [BlackBeenie/Bloslain-8B-v0.2](https://huggingface.co/BlackBeenie/Bloslain-8B-v0.2)
The dataset is composed of 38 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run. Each run can be found as a specific split in each configuration, with the split named after the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/BlackBeenie__Bloslain-8B-v0.2-details",
name="BlackBeenie__Bloslain-8B-v0.2__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
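Because this repository exposes 38 per-task configurations, it can be convenient to enumerate them programmatically instead of typing each name by hand. The snippet below is a small sketch of that workflow using the `datasets` library; the configuration chosen for loading is just one example taken from the list of configs in this card.
```python
from datasets import get_dataset_config_names, load_dataset

# List all per-task configurations available in this details repository.
configs = get_dataset_config_names(
    "open-llm-leaderboard/BlackBeenie__Bloslain-8B-v0.2-details"
)
print(configs)

# Load the "latest" split of one configuration from that list
# (here: the IFEval samples) and inspect a single record.
ifeval = load_dataset(
    "open-llm-leaderboard/BlackBeenie__Bloslain-8B-v0.2-details",
    name="BlackBeenie__Bloslain-8B-v0.2__leaderboard_ifeval",
    split="latest",
)
print(ifeval[0])
```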
## Latest results
These are the [latest results from run 2024-11-20T00-14-46.384603](https://huggingface.co/datasets/open-llm-leaderboard/BlackBeenie__Bloslain-8B-v0.2-details/blob/main/BlackBeenie__Bloslain-8B-v0.2/results_2024-11-20T00-14-46.384603.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the "results" configuration and in the "latest" split of each eval):
```python
{
"all": {
"leaderboard": {
"acc,none": 0.3653590425531915,
"acc_stderr,none": 0.00439008688059826,
"inst_level_loose_acc,none": 0.6247002398081535,
"inst_level_loose_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.5157116451016636,
"prompt_level_loose_acc_stderr,none": 0.021505948540061572,
"prompt_level_strict_acc,none": 0.4399260628465804,
"prompt_level_strict_acc_stderr,none": 0.021360708220801928,
"acc_norm,none": 0.46750551303671034,
"acc_norm_stderr,none": 0.0053551995731588,
"exact_match,none": 0.14501510574018128,
"exact_match_stderr,none": 0.008997105714006057,
"inst_level_strict_acc,none": 0.564748201438849,
"inst_level_strict_acc_stderr,none": "N/A",
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.5087658392640166,
"acc_norm_stderr,none": 0.006197730311334722,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.832,
"acc_norm_stderr,none": 0.023692813205492536
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.6149732620320856,
"acc_norm_stderr,none": 0.03567936280544673
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.464,
"acc_norm_stderr,none": 0.03160397514522374
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.404,
"acc_norm_stderr,none": 0.03109668818482536
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.564,
"acc_norm_stderr,none": 0.03142556706028136
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.412,
"acc_norm_stderr,none": 0.03119159602602282
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.716,
"acc_norm_stderr,none": 0.028576958730437443
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.396,
"acc_norm_stderr,none": 0.030993197854577898
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.392,
"acc_norm_stderr,none": 0.030938207620401222
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.672,
"acc_norm_stderr,none": 0.029752391824475363
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.696,
"acc_norm_stderr,none": 0.029150213374159652
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.652,
"acc_norm_stderr,none": 0.030186568464511673
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.444,
"acc_norm_stderr,none": 0.03148684942554571
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.4794520547945205,
"acc_norm_stderr,none": 0.041487661809251744
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.564,
"acc_norm_stderr,none": 0.03142556706028136
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.648,
"acc_norm_stderr,none": 0.030266288057359866
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.476,
"acc_norm_stderr,none": 0.03164968895968774
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.651685393258427,
"acc_norm_stderr,none": 0.035811144737534356
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.704,
"acc_norm_stderr,none": 0.028928939388379697
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.24,
"acc_norm_stderr,none": 0.027065293652238982
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.196,
"acc_norm_stderr,none": 0.025156857313255926
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.192,
"acc_norm_stderr,none": 0.024960691989171963
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.312,
"acc_norm_stderr,none": 0.02936106757521985
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_gpqa": {
"acc_norm,none": 0.3062080536912752,
"acc_norm_stderr,none": 0.013366495210573544,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.30808080808080807,
"acc_norm_stderr,none": 0.03289477330098615
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.3021978021978022,
"acc_norm_stderr,none": 0.019670416969439074
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.31026785714285715,
"acc_norm_stderr,none": 0.021880380205954585
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.4399260628465804,
"prompt_level_strict_acc_stderr,none": 0.021360708220801928,
"inst_level_strict_acc,none": 0.564748201438849,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.5157116451016636,
"prompt_level_loose_acc_stderr,none": 0.021505948540061572,
"inst_level_loose_acc,none": 0.6247002398081535,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.14501510574018128,
"exact_match_stderr,none": 0.008997105714006057,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.30618892508143325,
"exact_match_stderr,none": 0.026348423774891612
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.07317073170731707,
"exact_match_stderr,none": 0.023577005978097667
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.030303030303030304,
"exact_match_stderr,none": 0.014977019714308254
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.017857142857142856,
"exact_match_stderr,none": 0.007928503387888855
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.08441558441558442,
"exact_match_stderr,none": 0.022475781231866967
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.32124352331606215,
"exact_match_stderr,none": 0.03369950868549069
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.037037037037037035,
"exact_match_stderr,none": 0.016314377626726044
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.3653590425531915,
"acc_stderr,none": 0.00439008688059826
},
"leaderboard_musr": {
"acc_norm,none": 0.4074074074074074,
"acc_norm_stderr,none": 0.01752871103086407,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.54,
"acc_norm_stderr,none": 0.031584653891499004
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.38671875,
"acc_norm_stderr,none": 0.030497017430410063
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.296,
"acc_norm_stderr,none": 0.028928939388379694
}
},
"leaderboard": {
"acc,none": 0.3653590425531915,
"acc_stderr,none": 0.00439008688059826,
"inst_level_loose_acc,none": 0.6247002398081535,
"inst_level_loose_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.5157116451016636,
"prompt_level_loose_acc_stderr,none": 0.021505948540061572,
"prompt_level_strict_acc,none": 0.4399260628465804,
"prompt_level_strict_acc_stderr,none": 0.021360708220801928,
"acc_norm,none": 0.46750551303671034,
"acc_norm_stderr,none": 0.0053551995731588,
"exact_match,none": 0.14501510574018128,
"exact_match_stderr,none": 0.008997105714006057,
"inst_level_strict_acc,none": 0.564748201438849,
"inst_level_strict_acc_stderr,none": "N/A",
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.5087658392640166,
"acc_norm_stderr,none": 0.006197730311334722,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.832,
"acc_norm_stderr,none": 0.023692813205492536
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.6149732620320856,
"acc_norm_stderr,none": 0.03567936280544673
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.464,
"acc_norm_stderr,none": 0.03160397514522374
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.404,
"acc_norm_stderr,none": 0.03109668818482536
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.564,
"acc_norm_stderr,none": 0.03142556706028136
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.412,
"acc_norm_stderr,none": 0.03119159602602282
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.716,
"acc_norm_stderr,none": 0.028576958730437443
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.396,
"acc_norm_stderr,none": 0.030993197854577898
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.392,
"acc_norm_stderr,none": 0.030938207620401222
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.672,
"acc_norm_stderr,none": 0.029752391824475363
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.696,
"acc_norm_stderr,none": 0.029150213374159652
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.652,
"acc_norm_stderr,none": 0.030186568464511673
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.444,
"acc_norm_stderr,none": 0.03148684942554571
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.4794520547945205,
"acc_norm_stderr,none": 0.041487661809251744
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.564,
"acc_norm_stderr,none": 0.03142556706028136
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.648,
"acc_norm_stderr,none": 0.030266288057359866
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.476,
"acc_norm_stderr,none": 0.03164968895968774
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.651685393258427,
"acc_norm_stderr,none": 0.035811144737534356
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.704,
"acc_norm_stderr,none": 0.028928939388379697
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.24,
"acc_norm_stderr,none": 0.027065293652238982
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.196,
"acc_norm_stderr,none": 0.025156857313255926
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.192,
"acc_norm_stderr,none": 0.024960691989171963
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.312,
"acc_norm_stderr,none": 0.02936106757521985
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_gpqa": {
"acc_norm,none": 0.3062080536912752,
"acc_norm_stderr,none": 0.013366495210573544,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.30808080808080807,
"acc_norm_stderr,none": 0.03289477330098615
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.3021978021978022,
"acc_norm_stderr,none": 0.019670416969439074
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.31026785714285715,
"acc_norm_stderr,none": 0.021880380205954585
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.4399260628465804,
"prompt_level_strict_acc_stderr,none": 0.021360708220801928,
"inst_level_strict_acc,none": 0.564748201438849,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.5157116451016636,
"prompt_level_loose_acc_stderr,none": 0.021505948540061572,
"inst_level_loose_acc,none": 0.6247002398081535,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.14501510574018128,
"exact_match_stderr,none": 0.008997105714006057,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.30618892508143325,
"exact_match_stderr,none": 0.026348423774891612
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.07317073170731707,
"exact_match_stderr,none": 0.023577005978097667
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.030303030303030304,
"exact_match_stderr,none": 0.014977019714308254
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.017857142857142856,
"exact_match_stderr,none": 0.007928503387888855
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.08441558441558442,
"exact_match_stderr,none": 0.022475781231866967
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.32124352331606215,
"exact_match_stderr,none": 0.03369950868549069
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.037037037037037035,
"exact_match_stderr,none": 0.016314377626726044
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.3653590425531915,
"acc_stderr,none": 0.00439008688059826
},
"leaderboard_musr": {
"acc_norm,none": 0.4074074074074074,
"acc_norm_stderr,none": 0.01752871103086407,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.54,
"acc_norm_stderr,none": 0.031584653891499004
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.38671875,
"acc_norm_stderr,none": 0.030497017430410063
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.296,
"acc_norm_stderr,none": 0.028928939388379694
}
}
```
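The aggregated results shown above are also stored as a standalone JSON file in this repository (the one linked in the "Latest results" line). If you prefer to work with that raw file rather than the per-task configurations, a minimal sketch using `huggingface_hub` could look like this; the `repo_id` and `filename` are taken from the link above, while the final inspection step is deliberately generic because the exact internal layout of the JSON file is not documented in this card.
```python
import json

from huggingface_hub import hf_hub_download

# Download the aggregated results file referenced in the "Latest results" link.
# repo_type must be "dataset" since this is a dataset repository.
results_path = hf_hub_download(
    repo_id="open-llm-leaderboard/BlackBeenie__Bloslain-8B-v0.2-details",
    filename="BlackBeenie__Bloslain-8B-v0.2/results_2024-11-20T00-14-46.384603.json",
    repo_type="dataset",
)

with open(results_path) as f:
    results = json.load(f)

# Inspect the top-level sections before relying on any particular key;
# the exact schema of the file is an assumption and may differ from the
# dictionary rendered in this card.
print(list(results.keys()))
```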
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases, and limitations of the dataset. More information is needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/3rd-Degree-Burn__L-3.1-Science-Writer-8B-details | open-llm-leaderboard | "2024-11-20T00:20:57Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T00:17:50Z" | ---
pretty_name: Evaluation run of 3rd-Degree-Burn/L-3.1-Science-Writer-8B
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [3rd-Degree-Burn/L-3.1-Science-Writer-8B](https://huggingface.co/3rd-Degree-Burn/L-3.1-Science-Writer-8B)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/3rd-Degree-Burn__L-3.1-Science-Writer-8B-details\"\
,\n\tname=\"3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-11-20T00-17-49.945335](https://huggingface.co/datasets/open-llm-leaderboard/3rd-Degree-Burn__L-3.1-Science-Writer-8B-details/blob/main/3rd-Degree-Burn__L-3.1-Science-Writer-8B/results_2024-11-20T00-17-49.945335.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"acc,none\": 0.36494348404255317,\n \"acc_stderr,none\"\
: 0.004389025779632208,\n \"inst_level_loose_acc,none\": 0.5455635491606715,\n\
\ \"inst_level_loose_acc_stderr,none\": \"N/A\",\n \"acc_norm,none\"\
: 0.45544169152938124,\n \"acc_norm_stderr,none\": 0.00535232797982857,\n\
\ \"inst_level_strict_acc,none\": 0.4976019184652279,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_strict_acc,none\"\
: 0.35489833641404805,\n \"prompt_level_strict_acc_stderr,none\": 0.02059060857592648,\n\
\ \"prompt_level_loose_acc,none\": 0.40850277264325324,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.021153244098720447,\n \"\
exact_match,none\": 0.1027190332326284,\n \"exact_match_stderr,none\"\
: 0.008038247394412276,\n \"alias\": \"leaderboard\"\n },\n \
\ \"leaderboard_bbh\": {\n \"acc_norm,none\": 0.5007811143898628,\n\
\ \"acc_norm_stderr,none\": 0.0062536290899852,\n \"alias\"\
: \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \
\ \"acc_norm,none\": 0.804,\n \"acc_norm_stderr,none\": 0.025156857313255922\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\"\
: \" - leaderboard_bbh_causal_judgement\",\n \"acc_norm,none\": 0.6149732620320856,\n\
\ \"acc_norm_stderr,none\": 0.03567936280544673\n },\n \
\ \"leaderboard_bbh_date_understanding\": {\n \"alias\": \" - leaderboard_bbh_date_understanding\"\
,\n \"acc_norm,none\": 0.472,\n \"acc_norm_stderr,none\":\
\ 0.031636489531544396\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\",\n \
\ \"acc_norm,none\": 0.408,\n \"acc_norm_stderr,none\": 0.031145209846548512\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\"\
: \" - leaderboard_bbh_formal_fallacies\",\n \"acc_norm,none\": 0.564,\n\
\ \"acc_norm_stderr,none\": 0.03142556706028136\n },\n \
\ \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.416,\n \"acc_norm_stderr,none\":\
\ 0.031235856237014505\n },\n \"leaderboard_bbh_hyperbaton\": {\n\
\ \"alias\": \" - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\"\
: 0.72,\n \"acc_norm_stderr,none\": 0.02845414827783231\n },\n\
\ \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.38,\n \"acc_norm_stderr,none\": 0.030760116042626098\n },\n\
\ \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\",\n \"\
acc_norm,none\": 0.396,\n \"acc_norm_stderr,none\": 0.030993197854577898\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n\
\ \"acc_norm,none\": 0.588,\n \"acc_norm_stderr,none\": 0.031191596026022818\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"acc_norm,none\"\
: 0.66,\n \"acc_norm_stderr,none\": 0.030020073605457876\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.504,\n \"acc_norm_stderr,none\":\
\ 0.0316851985511992\n },\n \"leaderboard_bbh_object_counting\": {\n\
\ \"alias\": \" - leaderboard_bbh_object_counting\",\n \"\
acc_norm,none\": 0.456,\n \"acc_norm_stderr,none\": 0.031563285061213475\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
alias\": \" - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\"\
: 0.4931506849315068,\n \"acc_norm_stderr,none\": 0.04151884382177796\n\
\ },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\",\n\
\ \"acc_norm,none\": 0.552,\n \"acc_norm_stderr,none\": 0.03151438761115348\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\": 0.664,\n \
\ \"acc_norm_stderr,none\": 0.029933259094191533\n },\n \"\
leaderboard_bbh_salient_translation_error_detection\": {\n \"alias\"\
: \" - leaderboard_bbh_salient_translation_error_detection\",\n \"acc_norm,none\"\
: 0.436,\n \"acc_norm_stderr,none\": 0.031425567060281365\n },\n\
\ \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.6910112359550562,\n \"acc_norm_stderr,none\"\
: 0.0347317978779636\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \
\ \"acc_norm,none\": 0.728,\n \"acc_norm_stderr,none\": 0.028200088296309975\n\
\ },\n \"leaderboard_bbh_temporal_sequences\": {\n \"alias\"\
: \" - leaderboard_bbh_temporal_sequences\",\n \"acc_norm,none\": 0.348,\n\
\ \"acc_norm_stderr,none\": 0.030186568464511673\n },\n \
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \"\
alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\",\n \
\ \"acc_norm,none\": 0.212,\n \"acc_norm_stderr,none\": 0.025901884690541117\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.172,\n \"acc_norm_stderr,none\":\
\ 0.02391551394448624\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.316,\n \"acc_norm_stderr,none\":\
\ 0.029462657598578648\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.504,\n \"acc_norm_stderr,none\": 0.0316851985511992\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.27432885906040266,\n\
\ \"acc_norm_stderr,none\": 0.01293701558369602,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.2878787878787879,\n \"acc_norm_stderr,none\": 0.03225883512300998\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.26556776556776557,\n\
\ \"acc_norm_stderr,none\": 0.01891756755796826\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.27901785714285715,\n \"acc_norm_stderr,none\"\
: 0.021214094157265946\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.35489833641404805,\n \"prompt_level_strict_acc_stderr,none\": 0.02059060857592648,\n\
\ \"inst_level_strict_acc,none\": 0.49760191846522783,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.40850277264325324,\n \"prompt_level_loose_acc_stderr,none\": 0.021153244098720447,\n\
\ \"inst_level_loose_acc,none\": 0.5455635491606715,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.1027190332326284,\n \"exact_match_stderr,none\"\
: 0.008038247394412276,\n \"alias\": \" - leaderboard_math_hard\"\n \
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.18241042345276873,\n\
\ \"exact_match_stderr,none\": 0.02207657949763785\n },\n \
\ \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\": \" \
\ - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.04065040650406504,\n \"exact_match_stderr,none\": 0.017878907564437465\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\"\
: \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\": 0.045454545454545456,\n\
\ \"exact_match_stderr,none\": 0.018199158975632696\n },\n \
\ \"leaderboard_math_intermediate_algebra_hard\": {\n \"alias\":\
\ \" - leaderboard_math_intermediate_algebra_hard\",\n \"exact_match,none\"\
: 0.010714285714285714,\n \"exact_match_stderr,none\": 0.006163684194761604\n\
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\"\
: \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\": 0.09740259740259741,\n\
\ \"exact_match_stderr,none\": 0.023971024368870292\n },\n \
\ \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.23834196891191708,\n \"exact_match_stderr,none\"\
: 0.030748905363909895\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.037037037037037035,\n \"exact_match_stderr,none\"\
: 0.016314377626726044\n },\n \"leaderboard_mmlu_pro\": {\n \
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.36494348404255317,\n\
\ \"acc_stderr,none\": 0.004389025779632208\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.3955026455026455,\n \"acc_norm_stderr,none\"\
: 0.017081392990780264,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"alias\":\
\ \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\": 0.588,\n\
\ \"acc_norm_stderr,none\": 0.031191596026022818\n },\n \
\ \"leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.33984375,\n \"acc_norm_stderr,none\"\
: 0.029661487249077814\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \
\ \"acc_norm,none\": 0.26,\n \"acc_norm_stderr,none\": 0.027797315752644335\n\
\ }\n },\n \"leaderboard\": {\n \"acc,none\": 0.36494348404255317,\n\
\ \"acc_stderr,none\": 0.004389025779632208,\n \"inst_level_loose_acc,none\"\
: 0.5455635491606715,\n \"inst_level_loose_acc_stderr,none\": \"N/A\",\n\
\ \"acc_norm,none\": 0.45544169152938124,\n \"acc_norm_stderr,none\"\
: 0.00535232797982857,\n \"inst_level_strict_acc,none\": 0.4976019184652279,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_strict_acc,none\"\
: 0.35489833641404805,\n \"prompt_level_strict_acc_stderr,none\": 0.02059060857592648,\n\
\ \"prompt_level_loose_acc,none\": 0.40850277264325324,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.021153244098720447,\n \"exact_match,none\": 0.1027190332326284,\n \
\ \"exact_match_stderr,none\": 0.008038247394412276,\n \"alias\": \"\
leaderboard\"\n },\n \"leaderboard_bbh\": {\n \"acc_norm,none\": 0.5007811143898628,\n\
\ \"acc_norm_stderr,none\": 0.0062536290899852,\n \"alias\": \" -\
\ leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\": {\n \
\ \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\"\
: 0.804,\n \"acc_norm_stderr,none\": 0.025156857313255922\n },\n \"\
leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.6149732620320856,\n \"acc_norm_stderr,none\"\
: 0.03567936280544673\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.472,\n \"acc_norm_stderr,none\": 0.031636489531544396\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.408,\n \"acc_norm_stderr,none\": 0.031145209846548512\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.564,\n \"acc_norm_stderr,none\": 0.03142556706028136\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.416,\n \"acc_norm_stderr,none\": 0.031235856237014505\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.72,\n \"acc_norm_stderr,none\": 0.02845414827783231\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.38,\n \"acc_norm_stderr,none\": 0.030760116042626098\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.396,\n \"acc_norm_stderr,none\": 0.030993197854577898\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.588,\n \"acc_norm_stderr,none\": 0.031191596026022818\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.66,\n \"acc_norm_stderr,none\": 0.030020073605457876\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.504,\n \"acc_norm_stderr,none\": 0.0316851985511992\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.456,\n \"acc_norm_stderr,none\": 0.031563285061213475\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.4931506849315068,\n\
\ \"acc_norm_stderr,none\": 0.04151884382177796\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.552,\n \"acc_norm_stderr,none\": 0.03151438761115348\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.664,\n \"acc_norm_stderr,none\": 0.029933259094191533\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.436,\n \"acc_norm_stderr,none\": 0.031425567060281365\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.6910112359550562,\n \"acc_norm_stderr,none\"\
: 0.0347317978779636\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.728,\n \"acc_norm_stderr,none\": 0.028200088296309975\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.348,\n \"acc_norm_stderr,none\": 0.030186568464511673\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.212,\n \"acc_norm_stderr,none\": 0.025901884690541117\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.172,\n \"acc_norm_stderr,none\": 0.02391551394448624\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.316,\n \"acc_norm_stderr,none\": 0.029462657598578648\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.504,\n \"acc_norm_stderr,none\": 0.0316851985511992\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.27432885906040266,\n\
\ \"acc_norm_stderr,none\": 0.01293701558369602,\n \"alias\": \" -\
\ leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"alias\"\
: \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.2878787878787879,\n\
\ \"acc_norm_stderr,none\": 0.03225883512300998\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.26556776556776557,\n \"acc_norm_stderr,none\": 0.01891756755796826\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.27901785714285715,\n \"acc_norm_stderr,none\"\
: 0.021214094157265946\n },\n \"leaderboard_ifeval\": {\n \"alias\"\
: \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.35489833641404805,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.02059060857592648,\n \
\ \"inst_level_strict_acc,none\": 0.49760191846522783,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.40850277264325324,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.021153244098720447,\n \"inst_level_loose_acc,none\"\
: 0.5455635491606715,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n \
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.1027190332326284,\n\
\ \"exact_match_stderr,none\": 0.008038247394412276,\n \"alias\":\
\ \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\": {\n\
\ \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.18241042345276873,\n \"exact_match_stderr,none\": 0.02207657949763785\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.04065040650406504,\n \"exact_match_stderr,none\": 0.017878907564437465\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\": \" - leaderboard_math_geometry_hard\"\
,\n \"exact_match,none\": 0.045454545454545456,\n \"exact_match_stderr,none\"\
: 0.018199158975632696\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.010714285714285714,\n \"exact_match_stderr,none\"\
: 0.006163684194761604\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.09740259740259741,\n \"exact_match_stderr,none\": 0.023971024368870292\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.23834196891191708,\n \"exact_match_stderr,none\"\
: 0.030748905363909895\n },\n \"leaderboard_math_precalculus_hard\": {\n \
\ \"alias\": \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\"\
: 0.037037037037037035,\n \"exact_match_stderr,none\": 0.016314377626726044\n\
\ },\n \"leaderboard_mmlu_pro\": {\n \"alias\": \" - leaderboard_mmlu_pro\"\
,\n \"acc,none\": 0.36494348404255317,\n \"acc_stderr,none\": 0.004389025779632208\n\
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.3955026455026455,\n\
\ \"acc_norm_stderr,none\": 0.017081392990780264,\n \"alias\": \"\
\ - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\": {\n \
\ \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\"\
: 0.588,\n \"acc_norm_stderr,none\": 0.031191596026022818\n },\n \"\
leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.33984375,\n \"acc_norm_stderr,none\": 0.029661487249077814\n\
\ },\n \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.26,\n \"acc_norm_stderr,none\": 0.027797315752644335\n\
\ }\n}\n```"
repo_url: https://huggingface.co/3rd-Degree-Burn/L-3.1-Science-Writer-8B
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_bbh_navigate
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_bbh_snarks
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_gpqa_extended
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_gpqa_main
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_ifeval
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_ifeval_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_mmlu_pro
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_musr_object_placements
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T00-17-49.945335.jsonl'
- config_name: 3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_20T00_17_49.945335
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T00-17-49.945335.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T00-17-49.945335.jsonl'
---
# Dataset Card for Evaluation run of 3rd-Degree-Burn/L-3.1-Science-Writer-8B
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [3rd-Degree-Burn/L-3.1-Science-Writer-8B](https://huggingface.co/3rd-Degree-Burn/L-3.1-Science-Writer-8B)
The dataset is composed of 38 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/3rd-Degree-Burn__L-3.1-Science-Writer-8B-details",
name="3rd-Degree-Burn__L-3.1-Science-Writer-8B__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
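The aggregated results mentioned above live in their own configuration. A minimal sketch for loading them, assuming that configuration follows the same naming scheme and ends in `results` (the exact name can be verified with `get_dataset_config_names`):
```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/3rd-Degree-Burn__L-3.1-Science-Writer-8B-details"

# List the available configurations and pick the aggregated-results one
# (assumed here to be the configuration whose name ends in "results").
configs = get_dataset_config_names(repo)
results_config = next(c for c in configs if c.endswith("results"))

# Load the latest aggregated results for this run.
results = load_dataset(repo, name=results_config, split="latest")
print(results)
```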
## Latest results
These are the [latest results from run 2024-11-20T00-17-49.945335](https://huggingface.co/datasets/open-llm-leaderboard/3rd-Degree-Burn__L-3.1-Science-Writer-8B-details/blob/main/3rd-Degree-Burn__L-3.1-Science-Writer-8B/results_2024-11-20T00-17-49.945335.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and in the "latest" split for each eval):
```python
{
"all": {
"leaderboard": {
"acc,none": 0.36494348404255317,
"acc_stderr,none": 0.004389025779632208,
"inst_level_loose_acc,none": 0.5455635491606715,
"inst_level_loose_acc_stderr,none": "N/A",
"acc_norm,none": 0.45544169152938124,
"acc_norm_stderr,none": 0.00535232797982857,
"inst_level_strict_acc,none": 0.4976019184652279,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.35489833641404805,
"prompt_level_strict_acc_stderr,none": 0.02059060857592648,
"prompt_level_loose_acc,none": 0.40850277264325324,
"prompt_level_loose_acc_stderr,none": 0.021153244098720447,
"exact_match,none": 0.1027190332326284,
"exact_match_stderr,none": 0.008038247394412276,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.5007811143898628,
"acc_norm_stderr,none": 0.0062536290899852,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.804,
"acc_norm_stderr,none": 0.025156857313255922
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.6149732620320856,
"acc_norm_stderr,none": 0.03567936280544673
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.472,
"acc_norm_stderr,none": 0.031636489531544396
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.408,
"acc_norm_stderr,none": 0.031145209846548512
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.564,
"acc_norm_stderr,none": 0.03142556706028136
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.416,
"acc_norm_stderr,none": 0.031235856237014505
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.72,
"acc_norm_stderr,none": 0.02845414827783231
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.38,
"acc_norm_stderr,none": 0.030760116042626098
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.396,
"acc_norm_stderr,none": 0.030993197854577898
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.588,
"acc_norm_stderr,none": 0.031191596026022818
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.66,
"acc_norm_stderr,none": 0.030020073605457876
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.504,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.456,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.4931506849315068,
"acc_norm_stderr,none": 0.04151884382177796
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.03151438761115348
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.664,
"acc_norm_stderr,none": 0.029933259094191533
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.436,
"acc_norm_stderr,none": 0.031425567060281365
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.6910112359550562,
"acc_norm_stderr,none": 0.0347317978779636
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.728,
"acc_norm_stderr,none": 0.028200088296309975
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.348,
"acc_norm_stderr,none": 0.030186568464511673
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.212,
"acc_norm_stderr,none": 0.025901884690541117
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.172,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.029462657598578648
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.504,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_gpqa": {
"acc_norm,none": 0.27432885906040266,
"acc_norm_stderr,none": 0.01293701558369602,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2878787878787879,
"acc_norm_stderr,none": 0.03225883512300998
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.26556776556776557,
"acc_norm_stderr,none": 0.01891756755796826
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.27901785714285715,
"acc_norm_stderr,none": 0.021214094157265946
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.35489833641404805,
"prompt_level_strict_acc_stderr,none": 0.02059060857592648,
"inst_level_strict_acc,none": 0.49760191846522783,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.40850277264325324,
"prompt_level_loose_acc_stderr,none": 0.021153244098720447,
"inst_level_loose_acc,none": 0.5455635491606715,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.1027190332326284,
"exact_match_stderr,none": 0.008038247394412276,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.18241042345276873,
"exact_match_stderr,none": 0.02207657949763785
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.04065040650406504,
"exact_match_stderr,none": 0.017878907564437465
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.045454545454545456,
"exact_match_stderr,none": 0.018199158975632696
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.010714285714285714,
"exact_match_stderr,none": 0.006163684194761604
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.09740259740259741,
"exact_match_stderr,none": 0.023971024368870292
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.23834196891191708,
"exact_match_stderr,none": 0.030748905363909895
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.037037037037037035,
"exact_match_stderr,none": 0.016314377626726044
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.36494348404255317,
"acc_stderr,none": 0.004389025779632208
},
"leaderboard_musr": {
"acc_norm,none": 0.3955026455026455,
"acc_norm_stderr,none": 0.017081392990780264,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.588,
"acc_norm_stderr,none": 0.031191596026022818
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.33984375,
"acc_norm_stderr,none": 0.029661487249077814
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.26,
"acc_norm_stderr,none": 0.027797315752644335
}
},
"leaderboard": {
"acc,none": 0.36494348404255317,
"acc_stderr,none": 0.004389025779632208,
"inst_level_loose_acc,none": 0.5455635491606715,
"inst_level_loose_acc_stderr,none": "N/A",
"acc_norm,none": 0.45544169152938124,
"acc_norm_stderr,none": 0.00535232797982857,
"inst_level_strict_acc,none": 0.4976019184652279,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.35489833641404805,
"prompt_level_strict_acc_stderr,none": 0.02059060857592648,
"prompt_level_loose_acc,none": 0.40850277264325324,
"prompt_level_loose_acc_stderr,none": 0.021153244098720447,
"exact_match,none": 0.1027190332326284,
"exact_match_stderr,none": 0.008038247394412276,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.5007811143898628,
"acc_norm_stderr,none": 0.0062536290899852,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.804,
"acc_norm_stderr,none": 0.025156857313255922
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.6149732620320856,
"acc_norm_stderr,none": 0.03567936280544673
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.472,
"acc_norm_stderr,none": 0.031636489531544396
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.408,
"acc_norm_stderr,none": 0.031145209846548512
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.564,
"acc_norm_stderr,none": 0.03142556706028136
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.416,
"acc_norm_stderr,none": 0.031235856237014505
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.72,
"acc_norm_stderr,none": 0.02845414827783231
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.38,
"acc_norm_stderr,none": 0.030760116042626098
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.396,
"acc_norm_stderr,none": 0.030993197854577898
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.588,
"acc_norm_stderr,none": 0.031191596026022818
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.66,
"acc_norm_stderr,none": 0.030020073605457876
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.504,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.456,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.4931506849315068,
"acc_norm_stderr,none": 0.04151884382177796
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.03151438761115348
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.664,
"acc_norm_stderr,none": 0.029933259094191533
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.436,
"acc_norm_stderr,none": 0.031425567060281365
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.6910112359550562,
"acc_norm_stderr,none": 0.0347317978779636
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.728,
"acc_norm_stderr,none": 0.028200088296309975
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.348,
"acc_norm_stderr,none": 0.030186568464511673
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.212,
"acc_norm_stderr,none": 0.025901884690541117
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.172,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.029462657598578648
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.504,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_gpqa": {
"acc_norm,none": 0.27432885906040266,
"acc_norm_stderr,none": 0.01293701558369602,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2878787878787879,
"acc_norm_stderr,none": 0.03225883512300998
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.26556776556776557,
"acc_norm_stderr,none": 0.01891756755796826
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.27901785714285715,
"acc_norm_stderr,none": 0.021214094157265946
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.35489833641404805,
"prompt_level_strict_acc_stderr,none": 0.02059060857592648,
"inst_level_strict_acc,none": 0.49760191846522783,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.40850277264325324,
"prompt_level_loose_acc_stderr,none": 0.021153244098720447,
"inst_level_loose_acc,none": 0.5455635491606715,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.1027190332326284,
"exact_match_stderr,none": 0.008038247394412276,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.18241042345276873,
"exact_match_stderr,none": 0.02207657949763785
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.04065040650406504,
"exact_match_stderr,none": 0.017878907564437465
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.045454545454545456,
"exact_match_stderr,none": 0.018199158975632696
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.010714285714285714,
"exact_match_stderr,none": 0.006163684194761604
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.09740259740259741,
"exact_match_stderr,none": 0.023971024368870292
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.23834196891191708,
"exact_match_stderr,none": 0.030748905363909895
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.037037037037037035,
"exact_match_stderr,none": 0.016314377626726044
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.36494348404255317,
"acc_stderr,none": 0.004389025779632208
},
"leaderboard_musr": {
"acc_norm,none": 0.3955026455026455,
"acc_norm_stderr,none": 0.017081392990780264,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.588,
"acc_norm_stderr,none": 0.031191596026022818
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.33984375,
"acc_norm_stderr,none": 0.029661487249077814
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.26,
"acc_norm_stderr,none": 0.027797315752644335
}
}
```
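For quick inspection, a single task's score can also be read straight from the per-run results file linked at the top of this section. A minimal sketch, assuming the downloaded file contains a dictionary like the excerpt above (the raw file layout is not verified here, so the lookup falls back through a few plausible top-level keys):
```python
import json

from huggingface_hub import hf_hub_download

# Download the per-run results file referenced above (path taken from the card).
path = hf_hub_download(
    repo_id="open-llm-leaderboard/3rd-Degree-Burn__L-3.1-Science-Writer-8B-details",
    filename="3rd-Degree-Burn__L-3.1-Science-Writer-8B/results_2024-11-20T00-17-49.945335.json",
    repo_type="dataset",
)

with open(path) as f:
    data = json.load(f)

# Assumption: the per-task metrics sit under "all" as in the excerpt above;
# if not, try "results" or the top level of the file instead.
all_results = data.get("all") or data.get("results") or data

bbh = all_results["leaderboard_bbh"]
print(bbh["acc_norm,none"], bbh["acc_norm_stderr,none"])
```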
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/icefog72__Ice0.38-19.11-RP-details | open-llm-leaderboard | "2024-11-20T00:21:14Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T00:18:14Z" | ---
pretty_name: Evaluation run of icefog72/Ice0.38-19.11-RP
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [icefog72/Ice0.38-19.11-RP](https://huggingface.co/icefog72/Ice0.38-19.11-RP)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/icefog72__Ice0.38-19.11-RP-details\"\
,\n\tname=\"icefog72__Ice0.38-19.11-RP__leaderboard_bbh_boolean_expressions\",\n\
\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-11-20T00-18-14.118034](https://huggingface.co/datasets/open-llm-leaderboard/icefog72__Ice0.38-19.11-RP-details/blob/main/icefog72__Ice0.38-19.11-RP/results_2024-11-20T00-18-14.118034.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"acc_norm,none\": 0.46971072772084577,\n \"acc_norm_stderr,none\"\
: 0.005331921907928626,\n \"inst_level_loose_acc,none\": 0.5383693045563549,\n\
\ \"inst_level_loose_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.41035120147874304,\n \"prompt_level_loose_acc_stderr,none\": 0.021167895542791835,\n\
\ \"prompt_level_strict_acc,none\": 0.37707948243992606,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.020856233918528456,\n \"\
inst_level_strict_acc,none\": 0.5035971223021583,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"exact_match,none\": 0.05740181268882175,\n \
\ \"exact_match_stderr,none\": 0.006280593877986637,\n \"acc,none\":\
\ 0.31399601063829785,\n \"acc_stderr,none\": 0.004231306863238141,\n\
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.5082450963374414,\n \"acc_norm_stderr,none\"\
: 0.00614994171743953,\n \"alias\": \" - leaderboard_bbh\"\n },\n\
\ \"leaderboard_bbh_boolean_expressions\": {\n \"alias\": \" \
\ - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\": 0.812,\n\
\ \"acc_norm_stderr,none\": 0.02476037772775051\n },\n \
\ \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.6096256684491979,\n \"acc_norm_stderr,none\"\
: 0.03576973947986408\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_date_understanding\",\n \
\ \"acc_norm,none\": 0.4,\n \"acc_norm_stderr,none\": 0.031046021028253316\n\
\ },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\": 0.684,\n\
\ \"acc_norm_stderr,none\": 0.02946265759857865\n },\n \
\ \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.584,\n \"acc_norm_stderr,none\":\
\ 0.031235856237014505\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\",\n \
\ \"acc_norm,none\": 0.472,\n \"acc_norm_stderr,none\": 0.031636489531544396\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \"\
\ - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\": 0.796,\n \
\ \"acc_norm_stderr,none\": 0.025537121574548162\n },\n \"\
leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\": \" \
\ - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.464,\n \"acc_norm_stderr,none\": 0.03160397514522374\n },\n\
\ \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\",\n \"\
acc_norm,none\": 0.424,\n \"acc_norm_stderr,none\": 0.03131803437491622\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n\
\ \"acc_norm,none\": 0.652,\n \"acc_norm_stderr,none\": 0.030186568464511673\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"acc_norm,none\"\
: 0.632,\n \"acc_norm_stderr,none\": 0.03056207062099311\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.568,\n \"acc_norm_stderr,none\":\
\ 0.03139181076542941\n },\n \"leaderboard_bbh_object_counting\":\
\ {\n \"alias\": \" - leaderboard_bbh_object_counting\",\n \
\ \"acc_norm,none\": 0.372,\n \"acc_norm_stderr,none\": 0.03063032594455827\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
alias\": \" - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\"\
: 0.4589041095890411,\n \"acc_norm_stderr,none\": 0.04138224905067309\n\
\ },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\",\n\
\ \"acc_norm,none\": 0.504,\n \"acc_norm_stderr,none\": 0.0316851985511992\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\": 0.508,\n \
\ \"acc_norm_stderr,none\": 0.03168215643141386\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\
,\n \"acc_norm,none\": 0.436,\n \"acc_norm_stderr,none\":\
\ 0.031425567060281365\n },\n \"leaderboard_bbh_snarks\": {\n \
\ \"alias\": \" - leaderboard_bbh_snarks\",\n \"acc_norm,none\"\
: 0.6460674157303371,\n \"acc_norm_stderr,none\": 0.03594285405211505\n\
\ },\n \"leaderboard_bbh_sports_understanding\": {\n \"\
alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.828,\n \"acc_norm_stderr,none\": 0.02391551394448624\n },\n\
\ \"leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" -\
\ leaderboard_bbh_temporal_sequences\",\n \"acc_norm,none\": 0.268,\n\
\ \"acc_norm_stderr,none\": 0.02806876238252672\n },\n \
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \"\
alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\",\n \
\ \"acc_norm,none\": 0.172,\n \"acc_norm_stderr,none\": 0.02391551394448624\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.16,\n \"acc_norm_stderr,none\": 0.023232714782060626\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.296,\n \"acc_norm_stderr,none\":\
\ 0.028928939388379694\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.496,\n \"acc_norm_stderr,none\": 0.0316851985511992\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.30453020134228187,\n\
\ \"acc_norm_stderr,none\": 0.013345501737643605,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.29292929292929293,\n \"acc_norm_stderr,none\": 0.032424979581788145\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.3058608058608059,\n\
\ \"acc_norm_stderr,none\": 0.019737263843674822\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.3080357142857143,\n \"acc_norm_stderr,none\"\
: 0.021836780796366365\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.37707948243992606,\n \"prompt_level_strict_acc_stderr,none\": 0.020856233918528456,\n\
\ \"inst_level_strict_acc,none\": 0.5035971223021583,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.41035120147874304,\n \"prompt_level_loose_acc_stderr,none\": 0.021167895542791835,\n\
\ \"inst_level_loose_acc,none\": 0.5383693045563549,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.05740181268882175,\n \"exact_match_stderr,none\"\
: 0.006280593877986637,\n \"alias\": \" - leaderboard_math_hard\"\n \
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.10749185667752444,\n\
\ \"exact_match_stderr,none\": 0.01770651789315074\n },\n \
\ \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\": \" \
\ - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.032520325203252036,\n \"exact_match_stderr,none\": 0.016058998205879745\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\"\
: \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\": 0.022727272727272728,\n\
\ \"exact_match_stderr,none\": 0.0130210469090637\n },\n \
\ \"leaderboard_math_intermediate_algebra_hard\": {\n \"alias\": \"\
\ - leaderboard_math_intermediate_algebra_hard\",\n \"exact_match,none\"\
: 0.014285714285714285,\n \"exact_match_stderr,none\": 0.0071043508939153165\n\
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\"\
: \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\": 0.01948051948051948,\n\
\ \"exact_match_stderr,none\": 0.011173331005571083\n },\n \
\ \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.12953367875647667,\n \"exact_match_stderr,none\"\
: 0.024233532297758688\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.02962962962962963,\n \"exact_match_stderr,none\"\
: 0.014648038602753809\n },\n \"leaderboard_mmlu_pro\": {\n \
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.31399601063829785,\n\
\ \"acc_stderr,none\": 0.0042313068632381405\n },\n \"\
leaderboard_musr\": {\n \"acc_norm,none\": 0.4365079365079365,\n \
\ \"acc_norm_stderr,none\": 0.01780565986031392,\n \"alias\":\
\ \" - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\"\
: {\n \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \
\ \"acc_norm,none\": 0.552,\n \"acc_norm_stderr,none\": 0.03151438761115348\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\"\
: \" - leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.41015625,\n\
\ \"acc_norm_stderr,none\": 0.030801585176036275\n },\n \
\ \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.348,\n \"acc_norm_stderr,none\":\
\ 0.030186568464511673\n }\n },\n \"leaderboard\": {\n \"acc_norm,none\"\
: 0.46971072772084577,\n \"acc_norm_stderr,none\": 0.005331921907928626,\n\
\ \"inst_level_loose_acc,none\": 0.5383693045563549,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.41035120147874304,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.021167895542791835,\n \"prompt_level_strict_acc,none\"\
: 0.37707948243992606,\n \"prompt_level_strict_acc_stderr,none\": 0.020856233918528456,\n\
\ \"inst_level_strict_acc,none\": 0.5035971223021583,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"exact_match,none\": 0.05740181268882175,\n \"exact_match_stderr,none\"\
: 0.006280593877986637,\n \"acc,none\": 0.31399601063829785,\n \"\
acc_stderr,none\": 0.004231306863238141,\n \"alias\": \"leaderboard\"\n \
\ },\n \"leaderboard_bbh\": {\n \"acc_norm,none\": 0.5082450963374414,\n\
\ \"acc_norm_stderr,none\": 0.00614994171743953,\n \"alias\": \" -\
\ leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\": {\n \
\ \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\"\
: 0.812,\n \"acc_norm_stderr,none\": 0.02476037772775051\n },\n \"\
leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.6096256684491979,\n \"acc_norm_stderr,none\"\
: 0.03576973947986408\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.4,\n \"acc_norm_stderr,none\": 0.031046021028253316\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\"\
: 0.684,\n \"acc_norm_stderr,none\": 0.02946265759857865\n },\n \"\
leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.584,\n \"acc_norm_stderr,none\": 0.031235856237014505\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.472,\n \"acc_norm_stderr,none\": 0.031636489531544396\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.796,\n \"acc_norm_stderr,none\": 0.025537121574548162\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.464,\n \"acc_norm_stderr,none\": 0.03160397514522374\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.424,\n \"acc_norm_stderr,none\": 0.03131803437491622\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.652,\n \"acc_norm_stderr,none\": 0.030186568464511673\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.632,\n \"acc_norm_stderr,none\": 0.03056207062099311\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.568,\n \"acc_norm_stderr,none\": 0.03139181076542941\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.372,\n \"acc_norm_stderr,none\": 0.03063032594455827\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.4589041095890411,\n\
\ \"acc_norm_stderr,none\": 0.04138224905067309\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.504,\n \"acc_norm_stderr,none\": 0.0316851985511992\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.508,\n \"acc_norm_stderr,none\": 0.03168215643141386\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.436,\n \"acc_norm_stderr,none\": 0.031425567060281365\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.6460674157303371,\n \"acc_norm_stderr,none\"\
: 0.03594285405211505\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.828,\n \"acc_norm_stderr,none\": 0.02391551394448624\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.268,\n \"acc_norm_stderr,none\": 0.02806876238252672\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.172,\n \"acc_norm_stderr,none\": 0.02391551394448624\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.16,\n \"acc_norm_stderr,none\": 0.023232714782060626\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.296,\n \"acc_norm_stderr,none\": 0.028928939388379694\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.496,\n \"acc_norm_stderr,none\": 0.0316851985511992\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.30453020134228187,\n\
\ \"acc_norm_stderr,none\": 0.013345501737643605,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.29292929292929293,\n\
\ \"acc_norm_stderr,none\": 0.032424979581788145\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.3058608058608059,\n \"acc_norm_stderr,none\": 0.019737263843674822\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.3080357142857143,\n \"acc_norm_stderr,none\"\
: 0.021836780796366365\n },\n \"leaderboard_ifeval\": {\n \"alias\"\
: \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.37707948243992606,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.020856233918528456,\n \
\ \"inst_level_strict_acc,none\": 0.5035971223021583,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.41035120147874304,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.021167895542791835,\n \"inst_level_loose_acc,none\"\
: 0.5383693045563549,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n \
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.05740181268882175,\n\
\ \"exact_match_stderr,none\": 0.006280593877986637,\n \"alias\":\
\ \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\": {\n\
\ \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.10749185667752444,\n \"exact_match_stderr,none\": 0.01770651789315074\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.032520325203252036,\n \"exact_match_stderr,none\": 0.016058998205879745\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\": \" - leaderboard_math_geometry_hard\"\
,\n \"exact_match,none\": 0.022727272727272728,\n \"exact_match_stderr,none\"\
: 0.0130210469090637\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.014285714285714285,\n \"exact_match_stderr,none\"\
: 0.0071043508939153165\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.01948051948051948,\n \"exact_match_stderr,none\": 0.011173331005571083\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.12953367875647667,\n \"exact_match_stderr,none\"\
: 0.024233532297758688\n },\n \"leaderboard_math_precalculus_hard\": {\n \
\ \"alias\": \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\"\
: 0.02962962962962963,\n \"exact_match_stderr,none\": 0.014648038602753809\n\
\ },\n \"leaderboard_mmlu_pro\": {\n \"alias\": \" - leaderboard_mmlu_pro\"\
,\n \"acc,none\": 0.31399601063829785,\n \"acc_stderr,none\": 0.0042313068632381405\n\
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.4365079365079365,\n\
\ \"acc_norm_stderr,none\": 0.01780565986031392,\n \"alias\": \" -\
\ leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\": {\n \
\ \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\"\
: 0.552,\n \"acc_norm_stderr,none\": 0.03151438761115348\n },\n \"\
leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.41015625,\n \"acc_norm_stderr,none\": 0.030801585176036275\n\
\ },\n \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.348,\n \"acc_norm_stderr,none\": 0.030186568464511673\n\
\ }\n}\n```"
repo_url: https://huggingface.co/icefog72/Ice0.38-19.11-RP
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_bbh_navigate
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_bbh_snarks
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_gpqa_extended
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_gpqa_main
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_ifeval
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_ifeval_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_mmlu_pro
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_musr_object_placements
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T00-18-14.118034.jsonl'
- config_name: icefog72__Ice0.38-19.11-RP__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_20T00_18_14.118034
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T00-18-14.118034.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T00-18-14.118034.jsonl'
---
# Dataset Card for Evaluation run of icefog72/Ice0.38-19.11-RP
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [icefog72/Ice0.38-19.11-RP](https://huggingface.co/icefog72/Ice0.38-19.11-RP)
The dataset is composed of 38 configuration(s), each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the most recent results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/icefog72__Ice0.38-19.11-RP-details",
name="icefog72__Ice0.38-19.11-RP__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
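Each timestamped run is also exposed as its own split (the split names are listed in the configs section of this card's metadata), so a specific run can be pinned instead of the moving "latest" split. A minimal sketch using the split name of this run:
```python
from datasets import load_dataset

# Same task as above, but pinned to the exact run timestamp instead of "latest".
# The split string below is copied from the configs section of this card.
data = load_dataset(
    "open-llm-leaderboard/icefog72__Ice0.38-19.11-RP-details",
    name="icefog72__Ice0.38-19.11-RP__leaderboard_bbh_boolean_expressions",
    split="2024_11_20T00_18_14.118034",
)
print(data[0])
```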
## Latest results
These are the [latest results from run 2024-11-20T00-18-14.118034](https://huggingface.co/datasets/open-llm-leaderboard/icefog72__Ice0.38-19.11-RP-details/blob/main/icefog72__Ice0.38-19.11-RP/results_2024-11-20T00-18-14.118034.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results file and in the "latest" split for each eval):
```python
{
"all": {
"leaderboard": {
"acc_norm,none": 0.46971072772084577,
"acc_norm_stderr,none": 0.005331921907928626,
"inst_level_loose_acc,none": 0.5383693045563549,
"inst_level_loose_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.41035120147874304,
"prompt_level_loose_acc_stderr,none": 0.021167895542791835,
"prompt_level_strict_acc,none": 0.37707948243992606,
"prompt_level_strict_acc_stderr,none": 0.020856233918528456,
"inst_level_strict_acc,none": 0.5035971223021583,
"inst_level_strict_acc_stderr,none": "N/A",
"exact_match,none": 0.05740181268882175,
"exact_match_stderr,none": 0.006280593877986637,
"acc,none": 0.31399601063829785,
"acc_stderr,none": 0.004231306863238141,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.5082450963374414,
"acc_norm_stderr,none": 0.00614994171743953,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.812,
"acc_norm_stderr,none": 0.02476037772775051
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.6096256684491979,
"acc_norm_stderr,none": 0.03576973947986408
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.4,
"acc_norm_stderr,none": 0.031046021028253316
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.684,
"acc_norm_stderr,none": 0.02946265759857865
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.584,
"acc_norm_stderr,none": 0.031235856237014505
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.472,
"acc_norm_stderr,none": 0.031636489531544396
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.796,
"acc_norm_stderr,none": 0.025537121574548162
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.464,
"acc_norm_stderr,none": 0.03160397514522374
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.424,
"acc_norm_stderr,none": 0.03131803437491622
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.652,
"acc_norm_stderr,none": 0.030186568464511673
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.632,
"acc_norm_stderr,none": 0.03056207062099311
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.568,
"acc_norm_stderr,none": 0.03139181076542941
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.372,
"acc_norm_stderr,none": 0.03063032594455827
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.4589041095890411,
"acc_norm_stderr,none": 0.04138224905067309
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.504,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.436,
"acc_norm_stderr,none": 0.031425567060281365
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.6460674157303371,
"acc_norm_stderr,none": 0.03594285405211505
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.828,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.268,
"acc_norm_stderr,none": 0.02806876238252672
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.172,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.16,
"acc_norm_stderr,none": 0.023232714782060626
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.296,
"acc_norm_stderr,none": 0.028928939388379694
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.496,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_gpqa": {
"acc_norm,none": 0.30453020134228187,
"acc_norm_stderr,none": 0.013345501737643605,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.29292929292929293,
"acc_norm_stderr,none": 0.032424979581788145
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.3058608058608059,
"acc_norm_stderr,none": 0.019737263843674822
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.3080357142857143,
"acc_norm_stderr,none": 0.021836780796366365
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.37707948243992606,
"prompt_level_strict_acc_stderr,none": 0.020856233918528456,
"inst_level_strict_acc,none": 0.5035971223021583,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.41035120147874304,
"prompt_level_loose_acc_stderr,none": 0.021167895542791835,
"inst_level_loose_acc,none": 0.5383693045563549,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.05740181268882175,
"exact_match_stderr,none": 0.006280593877986637,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.10749185667752444,
"exact_match_stderr,none": 0.01770651789315074
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.032520325203252036,
"exact_match_stderr,none": 0.016058998205879745
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.022727272727272728,
"exact_match_stderr,none": 0.0130210469090637
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.014285714285714285,
"exact_match_stderr,none": 0.0071043508939153165
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.01948051948051948,
"exact_match_stderr,none": 0.011173331005571083
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.12953367875647667,
"exact_match_stderr,none": 0.024233532297758688
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.02962962962962963,
"exact_match_stderr,none": 0.014648038602753809
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.31399601063829785,
"acc_stderr,none": 0.0042313068632381405
},
"leaderboard_musr": {
"acc_norm,none": 0.4365079365079365,
"acc_norm_stderr,none": 0.01780565986031392,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.03151438761115348
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.41015625,
"acc_norm_stderr,none": 0.030801585176036275
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.348,
"acc_norm_stderr,none": 0.030186568464511673
}
},
"leaderboard": {
"acc_norm,none": 0.46971072772084577,
"acc_norm_stderr,none": 0.005331921907928626,
"inst_level_loose_acc,none": 0.5383693045563549,
"inst_level_loose_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.41035120147874304,
"prompt_level_loose_acc_stderr,none": 0.021167895542791835,
"prompt_level_strict_acc,none": 0.37707948243992606,
"prompt_level_strict_acc_stderr,none": 0.020856233918528456,
"inst_level_strict_acc,none": 0.5035971223021583,
"inst_level_strict_acc_stderr,none": "N/A",
"exact_match,none": 0.05740181268882175,
"exact_match_stderr,none": 0.006280593877986637,
"acc,none": 0.31399601063829785,
"acc_stderr,none": 0.004231306863238141,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.5082450963374414,
"acc_norm_stderr,none": 0.00614994171743953,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.812,
"acc_norm_stderr,none": 0.02476037772775051
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.6096256684491979,
"acc_norm_stderr,none": 0.03576973947986408
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.4,
"acc_norm_stderr,none": 0.031046021028253316
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.684,
"acc_norm_stderr,none": 0.02946265759857865
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.584,
"acc_norm_stderr,none": 0.031235856237014505
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.472,
"acc_norm_stderr,none": 0.031636489531544396
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.796,
"acc_norm_stderr,none": 0.025537121574548162
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.464,
"acc_norm_stderr,none": 0.03160397514522374
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.424,
"acc_norm_stderr,none": 0.03131803437491622
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.652,
"acc_norm_stderr,none": 0.030186568464511673
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.632,
"acc_norm_stderr,none": 0.03056207062099311
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.568,
"acc_norm_stderr,none": 0.03139181076542941
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.372,
"acc_norm_stderr,none": 0.03063032594455827
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.4589041095890411,
"acc_norm_stderr,none": 0.04138224905067309
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.504,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.436,
"acc_norm_stderr,none": 0.031425567060281365
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.6460674157303371,
"acc_norm_stderr,none": 0.03594285405211505
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.828,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.268,
"acc_norm_stderr,none": 0.02806876238252672
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.172,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.16,
"acc_norm_stderr,none": 0.023232714782060626
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.296,
"acc_norm_stderr,none": 0.028928939388379694
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.496,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_gpqa": {
"acc_norm,none": 0.30453020134228187,
"acc_norm_stderr,none": 0.013345501737643605,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.29292929292929293,
"acc_norm_stderr,none": 0.032424979581788145
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.3058608058608059,
"acc_norm_stderr,none": 0.019737263843674822
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.3080357142857143,
"acc_norm_stderr,none": 0.021836780796366365
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.37707948243992606,
"prompt_level_strict_acc_stderr,none": 0.020856233918528456,
"inst_level_strict_acc,none": 0.5035971223021583,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.41035120147874304,
"prompt_level_loose_acc_stderr,none": 0.021167895542791835,
"inst_level_loose_acc,none": 0.5383693045563549,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.05740181268882175,
"exact_match_stderr,none": 0.006280593877986637,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.10749185667752444,
"exact_match_stderr,none": 0.01770651789315074
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.032520325203252036,
"exact_match_stderr,none": 0.016058998205879745
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.022727272727272728,
"exact_match_stderr,none": 0.0130210469090637
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.014285714285714285,
"exact_match_stderr,none": 0.0071043508939153165
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.01948051948051948,
"exact_match_stderr,none": 0.011173331005571083
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.12953367875647667,
"exact_match_stderr,none": 0.024233532297758688
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.02962962962962963,
"exact_match_stderr,none": 0.014648038602753809
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.31399601063829785,
"acc_stderr,none": 0.0042313068632381405
},
"leaderboard_musr": {
"acc_norm,none": 0.4365079365079365,
"acc_norm_stderr,none": 0.01780565986031392,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.03151438761115348
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.41015625,
"acc_norm_stderr,none": 0.030801585176036275
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.348,
"acc_norm_stderr,none": 0.030186568464511673
}
}
```
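If only the aggregated numbers are needed, the raw results file linked in the "Latest results" section can also be fetched directly. This is a minimal sketch: the repository id and file path are copied from that link, and because the nested layout of the JSON may differ from the snippet printed above, it only inspects the top-level keys:
```python
import json

from huggingface_hub import hf_hub_download

# Download the aggregated results file referenced in the "Latest results" link above.
path = hf_hub_download(
    repo_id="open-llm-leaderboard/icefog72__Ice0.38-19.11-RP-details",
    filename="icefog72__Ice0.38-19.11-RP/results_2024-11-20T00-18-14.118034.json",
    repo_type="dataset",
)

with open(path) as f:
    results = json.load(f)

# The exact nesting can vary between harness versions, so inspect the keys first.
print(list(results.keys()))
```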
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/openbmb__MiniCPM-S-1B-sft-llama-format-details | open-llm-leaderboard | "2024-11-20T00:24:51Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T00:21:38Z" | ---
pretty_name: Evaluation run of openbmb/MiniCPM-S-1B-sft-llama-format
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [openbmb/MiniCPM-S-1B-sft-llama-format](https://huggingface.co/openbmb/MiniCPM-S-1B-sft-llama-format)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/openbmb__MiniCPM-S-1B-sft-llama-format-details\"\
,\n\tname=\"openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-11-20T00-21-38.166355](https://huggingface.co/datasets/open-llm-leaderboard/openbmb__MiniCPM-S-1B-sft-llama-format-details/blob/main/openbmb__MiniCPM-S-1B-sft-llama-format/results_2024-11-20T00-21-38.166355.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"acc_norm,none\": 0.29977947853158643,\n \"acc_norm_stderr,none\"\
: 0.004968061687202896,\n \"exact_match,none\": 0.023413897280966767,\n\
\ \"exact_match_stderr,none\": 0.004132734908348066,\n \"\
prompt_level_loose_acc,none\": 0.2902033271719039,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.019530856691222623,\n \"acc,none\": 0.1858377659574468,\n \
\ \"acc_stderr,none\": 0.0035462683919936927,\n \"inst_level_loose_acc,none\"\
: 0.4052757793764988,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\
,\n \"inst_level_strict_acc,none\": 0.38848920863309355,\n \
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_strict_acc,none\"\
: 0.27726432532347506,\n \"prompt_level_strict_acc_stderr,none\": 0.019263706963479336,\n\
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.3016837354625933,\n \"acc_norm_stderr,none\"\
: 0.005695006269710216,\n \"alias\": \" - leaderboard_bbh\"\n \
\ },\n \"leaderboard_bbh_boolean_expressions\": {\n \"alias\"\
: \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\": 0.46,\n\
\ \"acc_norm_stderr,none\": 0.031584653891499004\n },\n \
\ \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.5187165775401069,\n \"acc_norm_stderr,none\"\
: 0.03663608375537843\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_date_understanding\",\n \
\ \"acc_norm,none\": 0.192,\n \"acc_norm_stderr,none\": 0.024960691989171963\n\
\ },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\": 0.588,\n\
\ \"acc_norm_stderr,none\": 0.031191596026022818\n },\n \
\ \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.472,\n \"acc_norm_stderr,none\":\
\ 0.031636489531544396\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\",\n \
\ \"acc_norm,none\": 0.0,\n \"acc_norm_stderr,none\": 0.0\n \
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.484,\n \"acc_norm_stderr,none\":\
\ 0.03166998503010743\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
,\n \"acc_norm,none\": 0.192,\n \"acc_norm_stderr,none\":\
\ 0.024960691989171963\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.148,\n \"acc_norm_stderr,none\":\
\ 0.022503547243806186\n },\n \"leaderboard_bbh_logical_deduction_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\
,\n \"acc_norm,none\": 0.32,\n \"acc_norm_stderr,none\": 0.029561724955240978\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"acc_norm,none\"\
: 0.228,\n \"acc_norm_stderr,none\": 0.026587432487268498\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.42,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\"\
: \" - leaderboard_bbh_object_counting\",\n \"acc_norm,none\": 0.096,\n\
\ \"acc_norm_stderr,none\": 0.01866896141947719\n },\n \
\ \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\
,\n \"acc_norm,none\": 0.23972602739726026,\n \"acc_norm_stderr,none\"\
: 0.035453462375110385\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.172,\n \"acc_norm_stderr,none\":\
\ 0.02391551394448624\n },\n \"leaderboard_bbh_ruin_names\": {\n \
\ \"alias\": \" - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\"\
: 0.284,\n \"acc_norm_stderr,none\": 0.02857695873043744\n },\n\
\ \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.14,\n \"acc_norm_stderr,none\": 0.021989409645240245\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" -\
\ leaderboard_bbh_snarks\",\n \"acc_norm,none\": 0.47191011235955055,\n\
\ \"acc_norm_stderr,none\": 0.03752294651708463\n },\n \
\ \"leaderboard_bbh_sports_understanding\": {\n \"alias\": \" - leaderboard_bbh_sports_understanding\"\
,\n \"acc_norm,none\": 0.46,\n \"acc_norm_stderr,none\": 0.031584653891499004\n\
\ },\n \"leaderboard_bbh_temporal_sequences\": {\n \"alias\"\
: \" - leaderboard_bbh_temporal_sequences\",\n \"acc_norm,none\": 0.284,\n\
\ \"acc_norm_stderr,none\": 0.02857695873043744\n },\n \
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \"\
alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\",\n \
\ \"acc_norm,none\": 0.2,\n \"acc_norm_stderr,none\": 0.02534897002097912\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.144,\n \"acc_norm_stderr,none\":\
\ 0.022249407735450245\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.316,\n \"acc_norm_stderr,none\":\
\ 0.029462657598578648\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2709731543624161,\n\
\ \"acc_norm_stderr,none\": 0.012881640233458587,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.23737373737373738,\n \"acc_norm_stderr,none\": 0.030313710538198924\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.27289377289377287,\n\
\ \"acc_norm_stderr,none\": 0.019080840171987832\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.28348214285714285,\n \"acc_norm_stderr,none\"\
: 0.0213168289872622\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.27726432532347506,\n \"prompt_level_strict_acc_stderr,none\": 0.019263706963479336,\n\
\ \"inst_level_strict_acc,none\": 0.38848920863309355,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.2902033271719039,\n \"prompt_level_loose_acc_stderr,none\": 0.019530856691222623,\n\
\ \"inst_level_loose_acc,none\": 0.4052757793764988,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.023413897280966767,\n \"exact_match_stderr,none\"\
: 0.004132734908348066,\n \"alias\": \" - leaderboard_math_hard\"\n \
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.05211726384364821,\n\
\ \"exact_match_stderr,none\": 0.012705957463941452\n },\n \
\ \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\": \"\
\ - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.008130081300813009,\n \"exact_match_stderr,none\": 0.008130081300813007\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\"\
: \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\": 0.007575757575757576,\n\
\ \"exact_match_stderr,none\": 0.007575757575757577\n },\n \
\ \"leaderboard_math_intermediate_algebra_hard\": {\n \"alias\":\
\ \" - leaderboard_math_intermediate_algebra_hard\",\n \"exact_match,none\"\
: 0.0035714285714285713,\n \"exact_match_stderr,none\": 0.0035714285714285713\n\
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\"\
: \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\": 0.025974025974025976,\n\
\ \"exact_match_stderr,none\": 0.012859058999697068\n },\n \
\ \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.03626943005181347,\n \"exact_match_stderr,none\"\
: 0.013492659751295115\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.007407407407407408,\n \"exact_match_stderr,none\"\
: 0.007407407407407408\n },\n \"leaderboard_mmlu_pro\": {\n \
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.1858377659574468,\n\
\ \"acc_stderr,none\": 0.0035462683919936927\n },\n \"\
leaderboard_musr\": {\n \"acc_norm,none\": 0.3306878306878307,\n \
\ \"acc_norm_stderr,none\": 0.016445834778089977,\n \"alias\"\
: \" - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\"\
: {\n \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \
\ \"acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\"\
: \" - leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.20703125,\n\
\ \"acc_norm_stderr,none\": 0.025373238296688486\n },\n \
\ \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.272,\n \"acc_norm_stderr,none\":\
\ 0.028200088296309975\n }\n },\n \"leaderboard\": {\n \"acc_norm,none\"\
: 0.29977947853158643,\n \"acc_norm_stderr,none\": 0.004968061687202896,\n\
\ \"exact_match,none\": 0.023413897280966767,\n \"exact_match_stderr,none\"\
: 0.004132734908348066,\n \"prompt_level_loose_acc,none\": 0.2902033271719039,\n\
\ \"prompt_level_loose_acc_stderr,none\": 0.019530856691222623,\n \
\ \"acc,none\": 0.1858377659574468,\n \"acc_stderr,none\": 0.0035462683919936927,\n\
\ \"inst_level_loose_acc,none\": 0.4052757793764988,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"inst_level_strict_acc,none\": 0.38848920863309355,\n \
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_strict_acc,none\"\
: 0.27726432532347506,\n \"prompt_level_strict_acc_stderr,none\": 0.019263706963479336,\n\
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \
\ \"acc_norm,none\": 0.3016837354625933,\n \"acc_norm_stderr,none\": 0.005695006269710216,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \"\
acc_norm,none\": 0.46,\n \"acc_norm_stderr,none\": 0.031584653891499004\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.5187165775401069,\n \"acc_norm_stderr,none\"\
: 0.03663608375537843\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.192,\n \"acc_norm_stderr,none\": 0.024960691989171963\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.588,\n \"acc_norm_stderr,none\": 0.031191596026022818\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.472,\n \"acc_norm_stderr,none\": 0.031636489531544396\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.0,\n \"acc_norm_stderr,none\": 0.0\n },\n\
\ \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.484,\n \"acc_norm_stderr,none\": 0.03166998503010743\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.192,\n \"acc_norm_stderr,none\": 0.024960691989171963\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.148,\n \"acc_norm_stderr,none\": 0.022503547243806186\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.32,\n \"acc_norm_stderr,none\": 0.029561724955240978\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.228,\n \"acc_norm_stderr,none\": 0.026587432487268498\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.42,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.096,\n \"acc_norm_stderr,none\": 0.01866896141947719\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.23972602739726026,\n\
\ \"acc_norm_stderr,none\": 0.035453462375110385\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.172,\n \"acc_norm_stderr,none\": 0.02391551394448624\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.284,\n \"acc_norm_stderr,none\": 0.02857695873043744\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.14,\n \"acc_norm_stderr,none\": 0.021989409645240245\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.47191011235955055,\n \"acc_norm_stderr,none\"\
: 0.03752294651708463\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.46,\n \"acc_norm_stderr,none\": 0.031584653891499004\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.284,\n \"acc_norm_stderr,none\": 0.02857695873043744\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.2,\n \"acc_norm_stderr,none\": 0.02534897002097912\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.144,\n \"acc_norm_stderr,none\": 0.022249407735450245\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.316,\n \"acc_norm_stderr,none\": 0.029462657598578648\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2709731543624161,\n\
\ \"acc_norm_stderr,none\": 0.012881640233458587,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.23737373737373738,\n\
\ \"acc_norm_stderr,none\": 0.030313710538198924\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.27289377289377287,\n \"acc_norm_stderr,none\": 0.019080840171987832\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.28348214285714285,\n \"acc_norm_stderr,none\"\
: 0.0213168289872622\n },\n \"leaderboard_ifeval\": {\n \"alias\":\
\ \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.27726432532347506,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.019263706963479336,\n \
\ \"inst_level_strict_acc,none\": 0.38848920863309355,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.2902033271719039,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.019530856691222623,\n \"inst_level_loose_acc,none\"\
: 0.4052757793764988,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n \
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.023413897280966767,\n\
\ \"exact_match_stderr,none\": 0.004132734908348066,\n \"alias\":\
\ \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\": {\n\
\ \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.05211726384364821,\n \"exact_match_stderr,none\": 0.012705957463941452\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.008130081300813009,\n \"exact_match_stderr,none\": 0.008130081300813007\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\": \" - leaderboard_math_geometry_hard\"\
,\n \"exact_match,none\": 0.007575757575757576,\n \"exact_match_stderr,none\"\
: 0.007575757575757577\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.0035714285714285713,\n \"exact_match_stderr,none\"\
: 0.0035714285714285713\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.025974025974025976,\n \"exact_match_stderr,none\": 0.012859058999697068\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.03626943005181347,\n \"exact_match_stderr,none\"\
: 0.013492659751295115\n },\n \"leaderboard_math_precalculus_hard\": {\n \
\ \"alias\": \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\"\
: 0.007407407407407408,\n \"exact_match_stderr,none\": 0.007407407407407408\n\
\ },\n \"leaderboard_mmlu_pro\": {\n \"alias\": \" - leaderboard_mmlu_pro\"\
,\n \"acc,none\": 0.1858377659574468,\n \"acc_stderr,none\": 0.0035462683919936927\n\
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.3306878306878307,\n\
\ \"acc_norm_stderr,none\": 0.016445834778089977,\n \"alias\": \"\
\ - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\": {\n \
\ \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\"\
: 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n },\n \"\
leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.20703125,\n \"acc_norm_stderr,none\": 0.025373238296688486\n\
\ },\n \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.272,\n \"acc_norm_stderr,none\": 0.028200088296309975\n\
\ }\n}\n```"
repo_url: https://huggingface.co/openbmb/MiniCPM-S-1B-sft-llama-format
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_bbh_navigate
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_bbh_snarks
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_gpqa_extended
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_gpqa_main
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_ifeval
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_ifeval_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_mmlu_pro
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_musr_object_placements
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T00-21-38.166355.jsonl'
- config_name: openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_20T00_21_38.166355
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T00-21-38.166355.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T00-21-38.166355.jsonl'
---
# Dataset Card for Evaluation run of openbmb/MiniCPM-S-1B-sft-llama-format
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [openbmb/MiniCPM-S-1B-sft-llama-format](https://huggingface.co/openbmb/MiniCPM-S-1B-sft-llama-format).
The dataset is composed of 38 configurations, one for each evaluated task.
The dataset has been created from 1 run. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the most recent results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can, for instance, do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/openbmb__MiniCPM-S-1B-sft-llama-format-details",
name="openbmb__MiniCPM-S-1B-sft-llama-format__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
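The aggregated scores for the whole run can be loaded the same way. The snippet below is a minimal sketch: the configuration name `openbmb__MiniCPM-S-1B-sft-llama-format__results` is an assumption based on the usual naming convention of these detail repositories, not a name taken from the configuration list above.
```python
from datasets import load_dataset

# Load the aggregated results of the run (one record per evaluation run).
# NOTE: the "__results" configuration name is assumed from the usual naming
# convention of open-llm-leaderboard detail repositories.
results = load_dataset(
    "open-llm-leaderboard/openbmb__MiniCPM-S-1B-sft-llama-format-details",
    name="openbmb__MiniCPM-S-1B-sft-llama-format__results",
    split="latest"
)
print(results[0])  # nested dictionary of per-task metrics, as shown below
```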
## Latest results
These are the [latest results from run 2024-11-20T00-21-38.166355](https://huggingface.co/datasets/open-llm-leaderboard/openbmb__MiniCPM-S-1B-sft-llama-format-details/blob/main/openbmb__MiniCPM-S-1B-sft-llama-format/results_2024-11-20T00-21-38.166355.json) (note that there might be results for other tasks in the repository if successive evals didn't cover the same tasks; the results for each task are available in its "latest" split and in the aggregated results files):
```python
{
"all": {
"leaderboard": {
"acc_norm,none": 0.29977947853158643,
"acc_norm_stderr,none": 0.004968061687202896,
"exact_match,none": 0.023413897280966767,
"exact_match_stderr,none": 0.004132734908348066,
"prompt_level_loose_acc,none": 0.2902033271719039,
"prompt_level_loose_acc_stderr,none": 0.019530856691222623,
"acc,none": 0.1858377659574468,
"acc_stderr,none": 0.0035462683919936927,
"inst_level_loose_acc,none": 0.4052757793764988,
"inst_level_loose_acc_stderr,none": "N/A",
"inst_level_strict_acc,none": 0.38848920863309355,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.27726432532347506,
"prompt_level_strict_acc_stderr,none": 0.019263706963479336,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.3016837354625933,
"acc_norm_stderr,none": 0.005695006269710216,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.46,
"acc_norm_stderr,none": 0.031584653891499004
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5187165775401069,
"acc_norm_stderr,none": 0.03663608375537843
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.192,
"acc_norm_stderr,none": 0.024960691989171963
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.588,
"acc_norm_stderr,none": 0.031191596026022818
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.472,
"acc_norm_stderr,none": 0.031636489531544396
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.0,
"acc_norm_stderr,none": 0.0
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.484,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.192,
"acc_norm_stderr,none": 0.024960691989171963
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.148,
"acc_norm_stderr,none": 0.022503547243806186
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.32,
"acc_norm_stderr,none": 0.029561724955240978
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.228,
"acc_norm_stderr,none": 0.026587432487268498
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.096,
"acc_norm_stderr,none": 0.01866896141947719
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.23972602739726026,
"acc_norm_stderr,none": 0.035453462375110385
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.172,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.284,
"acc_norm_stderr,none": 0.02857695873043744
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.14,
"acc_norm_stderr,none": 0.021989409645240245
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.47191011235955055,
"acc_norm_stderr,none": 0.03752294651708463
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.46,
"acc_norm_stderr,none": 0.031584653891499004
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.284,
"acc_norm_stderr,none": 0.02857695873043744
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.2,
"acc_norm_stderr,none": 0.02534897002097912
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.144,
"acc_norm_stderr,none": 0.022249407735450245
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.029462657598578648
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2709731543624161,
"acc_norm_stderr,none": 0.012881640233458587,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.23737373737373738,
"acc_norm_stderr,none": 0.030313710538198924
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.27289377289377287,
"acc_norm_stderr,none": 0.019080840171987832
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.28348214285714285,
"acc_norm_stderr,none": 0.0213168289872622
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.27726432532347506,
"prompt_level_strict_acc_stderr,none": 0.019263706963479336,
"inst_level_strict_acc,none": 0.38848920863309355,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.2902033271719039,
"prompt_level_loose_acc_stderr,none": 0.019530856691222623,
"inst_level_loose_acc,none": 0.4052757793764988,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.023413897280966767,
"exact_match_stderr,none": 0.004132734908348066,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.05211726384364821,
"exact_match_stderr,none": 0.012705957463941452
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.008130081300813009,
"exact_match_stderr,none": 0.008130081300813007
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.007575757575757576,
"exact_match_stderr,none": 0.007575757575757577
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0035714285714285713,
"exact_match_stderr,none": 0.0035714285714285713
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.025974025974025976,
"exact_match_stderr,none": 0.012859058999697068
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.03626943005181347,
"exact_match_stderr,none": 0.013492659751295115
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.007407407407407408,
"exact_match_stderr,none": 0.007407407407407408
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.1858377659574468,
"acc_stderr,none": 0.0035462683919936927
},
"leaderboard_musr": {
"acc_norm,none": 0.3306878306878307,
"acc_norm_stderr,none": 0.016445834778089977,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.20703125,
"acc_norm_stderr,none": 0.025373238296688486
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.272,
"acc_norm_stderr,none": 0.028200088296309975
}
},
"leaderboard": {
"acc_norm,none": 0.29977947853158643,
"acc_norm_stderr,none": 0.004968061687202896,
"exact_match,none": 0.023413897280966767,
"exact_match_stderr,none": 0.004132734908348066,
"prompt_level_loose_acc,none": 0.2902033271719039,
"prompt_level_loose_acc_stderr,none": 0.019530856691222623,
"acc,none": 0.1858377659574468,
"acc_stderr,none": 0.0035462683919936927,
"inst_level_loose_acc,none": 0.4052757793764988,
"inst_level_loose_acc_stderr,none": "N/A",
"inst_level_strict_acc,none": 0.38848920863309355,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.27726432532347506,
"prompt_level_strict_acc_stderr,none": 0.019263706963479336,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.3016837354625933,
"acc_norm_stderr,none": 0.005695006269710216,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.46,
"acc_norm_stderr,none": 0.031584653891499004
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5187165775401069,
"acc_norm_stderr,none": 0.03663608375537843
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.192,
"acc_norm_stderr,none": 0.024960691989171963
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.588,
"acc_norm_stderr,none": 0.031191596026022818
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.472,
"acc_norm_stderr,none": 0.031636489531544396
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.0,
"acc_norm_stderr,none": 0.0
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.484,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.192,
"acc_norm_stderr,none": 0.024960691989171963
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.148,
"acc_norm_stderr,none": 0.022503547243806186
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.32,
"acc_norm_stderr,none": 0.029561724955240978
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.228,
"acc_norm_stderr,none": 0.026587432487268498
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.096,
"acc_norm_stderr,none": 0.01866896141947719
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.23972602739726026,
"acc_norm_stderr,none": 0.035453462375110385
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.172,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.284,
"acc_norm_stderr,none": 0.02857695873043744
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.14,
"acc_norm_stderr,none": 0.021989409645240245
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.47191011235955055,
"acc_norm_stderr,none": 0.03752294651708463
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.46,
"acc_norm_stderr,none": 0.031584653891499004
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.284,
"acc_norm_stderr,none": 0.02857695873043744
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.2,
"acc_norm_stderr,none": 0.02534897002097912
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.144,
"acc_norm_stderr,none": 0.022249407735450245
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.029462657598578648
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2709731543624161,
"acc_norm_stderr,none": 0.012881640233458587,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.23737373737373738,
"acc_norm_stderr,none": 0.030313710538198924
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.27289377289377287,
"acc_norm_stderr,none": 0.019080840171987832
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.28348214285714285,
"acc_norm_stderr,none": 0.0213168289872622
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.27726432532347506,
"prompt_level_strict_acc_stderr,none": 0.019263706963479336,
"inst_level_strict_acc,none": 0.38848920863309355,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.2902033271719039,
"prompt_level_loose_acc_stderr,none": 0.019530856691222623,
"inst_level_loose_acc,none": 0.4052757793764988,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.023413897280966767,
"exact_match_stderr,none": 0.004132734908348066,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.05211726384364821,
"exact_match_stderr,none": 0.012705957463941452
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.008130081300813009,
"exact_match_stderr,none": 0.008130081300813007
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.007575757575757576,
"exact_match_stderr,none": 0.007575757575757577
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0035714285714285713,
"exact_match_stderr,none": 0.0035714285714285713
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.025974025974025976,
"exact_match_stderr,none": 0.012859058999697068
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.03626943005181347,
"exact_match_stderr,none": 0.013492659751295115
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.007407407407407408,
"exact_match_stderr,none": 0.007407407407407408
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.1858377659574468,
"acc_stderr,none": 0.0035462683919936927
},
"leaderboard_musr": {
"acc_norm,none": 0.3306878306878307,
"acc_norm_stderr,none": 0.016445834778089977,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.20703125,
"acc_norm_stderr,none": 0.025373238296688486
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.272,
"acc_norm_stderr,none": 0.028200088296309975
}
}
```
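The nested dictionary above can be flattened for easier comparison of per-task scores. The snippet below is a minimal sketch, assuming the JSON block above has been saved locally as `results.json` (a hypothetical file name used only for illustration).
```python
import json

# Flatten the nested results into (task, metric, value) rows.
with open("results.json") as f:
    results = json.load(f)

rows = []
for task, metrics in results["all"].items():
    for key, value in metrics.items():
        # Skip alias labels and standard-error entries (some of which are "N/A").
        if key == "alias" or "_stderr," in key:
            continue
        metric = key.split(",")[0]  # e.g. "acc_norm,none" -> "acc_norm"
        rows.append((task, metric, value))

# Rank the BBH subtasks by normalized accuracy, highest first.
bbh = [r for r in rows if r[0].startswith("leaderboard_bbh_") and r[1] == "acc_norm"]
for task, _, value in sorted(bbh, key=lambda r: r[2], reverse=True):
    print(f"{task:55s} {value:.3f}")
```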
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases, and limitations of the dataset. More information is needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/SicariusSicariiStuff__dn_ep02-details | open-llm-leaderboard | "2024-11-20T00:33:30Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T00:30:03Z" | ---
pretty_name: Evaluation run of SicariusSicariiStuff/dn_ep02
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [SicariusSicariiStuff/dn_ep02](https://huggingface.co/SicariusSicariiStuff/dn_ep02)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/SicariusSicariiStuff__dn_ep02-details\"\
,\n\tname=\"SicariusSicariiStuff__dn_ep02__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-11-20T00-30-02.747981](https://huggingface.co/datasets/open-llm-leaderboard/SicariusSicariiStuff__dn_ep02-details/blob/main/SicariusSicariiStuff__dn_ep02/results_2024-11-20T00-30-02.747981.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"prompt_level_strict_acc,none\": 0.43253234750462105,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.021319792398178095,\n \"\
acc_norm,none\": 0.4834608898689843,\n \"acc_norm_stderr,none\": 0.005401676902860125,\n\
\ \"prompt_level_loose_acc,none\": 0.4805914972273567,\n \"\
prompt_level_loose_acc_stderr,none\": 0.021500357879025087,\n \"inst_level_strict_acc,none\"\
: 0.580335731414868,\n \"inst_level_strict_acc_stderr,none\": \"N/A\"\
,\n \"exact_match,none\": 0.14123867069486404,\n \"exact_match_stderr,none\"\
: 0.009071964964291027,\n \"acc,none\": 0.39976728723404253,\n \
\ \"acc_stderr,none\": 0.004465937013437842,\n \"inst_level_loose_acc,none\"\
: 0.6211031175059952,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\
,\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.5250824509633745,\n \"acc_norm_stderr,none\"\
: 0.006245185056833883,\n \"alias\": \" - leaderboard_bbh\"\n \
\ },\n \"leaderboard_bbh_boolean_expressions\": {\n \"alias\"\
: \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\": 0.82,\n\
\ \"acc_norm_stderr,none\": 0.02434689065029351\n },\n \
\ \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.5828877005347594,\n \"acc_norm_stderr,none\"\
: 0.0361545093114083\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_date_understanding\",\n \
\ \"acc_norm,none\": 0.392,\n \"acc_norm_stderr,none\": 0.030938207620401222\n\
\ },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\": 0.576,\n\
\ \"acc_norm_stderr,none\": 0.03131803437491622\n },\n \
\ \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.568,\n \"acc_norm_stderr,none\":\
\ 0.03139181076542941\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\",\n \
\ \"acc_norm,none\": 0.344,\n \"acc_norm_stderr,none\": 0.03010450339231644\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \"\
\ - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\": 0.688,\n \
\ \"acc_norm_stderr,none\": 0.029361067575219852\n },\n \"\
leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\": \" \
\ - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.408,\n \"acc_norm_stderr,none\": 0.031145209846548512\n },\n\
\ \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\",\n \"\
acc_norm,none\": 0.464,\n \"acc_norm_stderr,none\": 0.03160397514522374\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n\
\ \"acc_norm,none\": 0.628,\n \"acc_norm_stderr,none\": 0.03063032594455827\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"acc_norm,none\"\
: 0.664,\n \"acc_norm_stderr,none\": 0.029933259094191533\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.624,\n \"acc_norm_stderr,none\":\
\ 0.03069633626739458\n },\n \"leaderboard_bbh_object_counting\":\
\ {\n \"alias\": \" - leaderboard_bbh_object_counting\",\n \
\ \"acc_norm,none\": 0.44,\n \"acc_norm_stderr,none\": 0.03145724452223569\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
alias\": \" - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\"\
: 0.4589041095890411,\n \"acc_norm_stderr,none\": 0.04138224905067309\n\
\ },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\",\n\
\ \"acc_norm,none\": 0.564,\n \"acc_norm_stderr,none\": 0.03142556706028136\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\": 0.696,\n \
\ \"acc_norm_stderr,none\": 0.029150213374159652\n },\n \"\
leaderboard_bbh_salient_translation_error_detection\": {\n \"alias\"\
: \" - leaderboard_bbh_salient_translation_error_detection\",\n \"acc_norm,none\"\
: 0.524,\n \"acc_norm_stderr,none\": 0.03164968895968774\n },\n\
\ \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.6966292134831461,\n \"acc_norm_stderr,none\"\
: 0.03455421944400101\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \
\ \"acc_norm,none\": 0.728,\n \"acc_norm_stderr,none\": 0.028200088296309975\n\
\ },\n \"leaderboard_bbh_temporal_sequences\": {\n \"alias\"\
: \" - leaderboard_bbh_temporal_sequences\",\n \"acc_norm,none\": 0.484,\n\
\ \"acc_norm_stderr,none\": 0.03166998503010743\n },\n \
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \"\
alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\",\n \
\ \"acc_norm,none\": 0.22,\n \"acc_norm_stderr,none\": 0.026251792824605793\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.168,\n \"acc_norm_stderr,none\":\
\ 0.023692813205492536\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.336,\n \"acc_norm_stderr,none\":\
\ 0.02993325909419153\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.564,\n \"acc_norm_stderr,none\": 0.03142556706028136\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.31543624161073824,\n\
\ \"acc_norm_stderr,none\": 0.013462979686925527,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.3383838383838384,\n \"acc_norm_stderr,none\": 0.033711241426263\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.29304029304029305,\n\
\ \"acc_norm_stderr,none\": 0.019496773654296876\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.3325892857142857,\n \"acc_norm_stderr,none\"\
: 0.022284195136714192\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.43253234750462105,\n \"prompt_level_strict_acc_stderr,none\": 0.021319792398178095,\n\
\ \"inst_level_strict_acc,none\": 0.580335731414868,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.4805914972273567,\n \"prompt_level_loose_acc_stderr,none\": 0.021500357879025083,\n\
\ \"inst_level_loose_acc,none\": 0.6211031175059952,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.14123867069486404,\n \"exact_match_stderr,none\"\
: 0.009071964964291027,\n \"alias\": \" - leaderboard_math_hard\"\n \
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.28013029315960913,\n\
\ \"exact_match_stderr,none\": 0.025671206118429726\n },\n \
\ \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\": \"\
\ - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.06504065040650407,\n \"exact_match_stderr,none\": 0.022325895462591904\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\"\
: \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\": 0.045454545454545456,\n\
\ \"exact_match_stderr,none\": 0.018199158975632696\n },\n \
\ \"leaderboard_math_intermediate_algebra_hard\": {\n \"alias\":\
\ \" - leaderboard_math_intermediate_algebra_hard\",\n \"exact_match,none\"\
: 0.017857142857142856,\n \"exact_match_stderr,none\": 0.007928503387888855\n\
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\"\
: \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\": 0.14285714285714285,\n\
\ \"exact_match_stderr,none\": 0.028289929799333556\n },\n \
\ \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.27979274611398963,\n \"exact_match_stderr,none\"\
: 0.032396370467357\n },\n \"leaderboard_math_precalculus_hard\":\
\ {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.044444444444444446,\n \"exact_match_stderr,none\"\
: 0.01780263602032457\n },\n \"leaderboard_mmlu_pro\": {\n \
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.39976728723404253,\n\
\ \"acc_stderr,none\": 0.004465937013437842\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.4312169312169312,\n \"acc_norm_stderr,none\"\
: 0.017847072044090258,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"alias\":\
\ \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\": 0.536,\n\
\ \"acc_norm_stderr,none\": 0.031603975145223735\n },\n \
\ \"leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.37890625,\n \"acc_norm_stderr,none\"\
: 0.030379062946922643\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \
\ \"acc_norm,none\": 0.38,\n \"acc_norm_stderr,none\": 0.030760116042626098\n\
\ }\n },\n \"leaderboard\": {\n \"prompt_level_strict_acc,none\"\
: 0.43253234750462105,\n \"prompt_level_strict_acc_stderr,none\": 0.021319792398178095,\n\
\ \"acc_norm,none\": 0.4834608898689843,\n \"acc_norm_stderr,none\"\
: 0.005401676902860125,\n \"prompt_level_loose_acc,none\": 0.4805914972273567,\n\
\ \"prompt_level_loose_acc_stderr,none\": 0.021500357879025087,\n \
\ \"inst_level_strict_acc,none\": 0.580335731414868,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"exact_match,none\": 0.14123867069486404,\n \"exact_match_stderr,none\"\
: 0.009071964964291027,\n \"acc,none\": 0.39976728723404253,\n \"\
acc_stderr,none\": 0.004465937013437842,\n \"inst_level_loose_acc,none\"\
: 0.6211031175059952,\n \"inst_level_loose_acc_stderr,none\": \"N/A\",\n\
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \
\ \"acc_norm,none\": 0.5250824509633745,\n \"acc_norm_stderr,none\": 0.006245185056833883,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \"\
acc_norm,none\": 0.82,\n \"acc_norm_stderr,none\": 0.02434689065029351\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.5828877005347594,\n \"acc_norm_stderr,none\"\
: 0.0361545093114083\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.392,\n \"acc_norm_stderr,none\": 0.030938207620401222\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.576,\n \"acc_norm_stderr,none\": 0.03131803437491622\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.568,\n \"acc_norm_stderr,none\": 0.03139181076542941\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.344,\n \"acc_norm_stderr,none\": 0.03010450339231644\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.688,\n \"acc_norm_stderr,none\": 0.029361067575219852\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.408,\n \"acc_norm_stderr,none\": 0.031145209846548512\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.464,\n \"acc_norm_stderr,none\": 0.03160397514522374\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.628,\n \"acc_norm_stderr,none\": 0.03063032594455827\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.664,\n \"acc_norm_stderr,none\": 0.029933259094191533\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.624,\n \"acc_norm_stderr,none\": 0.03069633626739458\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.44,\n \"acc_norm_stderr,none\": 0.03145724452223569\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.4589041095890411,\n\
\ \"acc_norm_stderr,none\": 0.04138224905067309\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.564,\n \"acc_norm_stderr,none\": 0.03142556706028136\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.696,\n \"acc_norm_stderr,none\": 0.029150213374159652\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.524,\n \"acc_norm_stderr,none\": 0.03164968895968774\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.6966292134831461,\n \"acc_norm_stderr,none\"\
: 0.03455421944400101\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.728,\n \"acc_norm_stderr,none\": 0.028200088296309975\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.484,\n \"acc_norm_stderr,none\": 0.03166998503010743\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.22,\n \"acc_norm_stderr,none\": 0.026251792824605793\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.168,\n \"acc_norm_stderr,none\": 0.023692813205492536\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.336,\n \"acc_norm_stderr,none\": 0.02993325909419153\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.564,\n \"acc_norm_stderr,none\": 0.03142556706028136\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.31543624161073824,\n\
\ \"acc_norm_stderr,none\": 0.013462979686925527,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.3383838383838384,\n\
\ \"acc_norm_stderr,none\": 0.033711241426263\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.29304029304029305,\n \"acc_norm_stderr,none\": 0.019496773654296876\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.3325892857142857,\n \"acc_norm_stderr,none\"\
: 0.022284195136714192\n },\n \"leaderboard_ifeval\": {\n \"alias\"\
: \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.43253234750462105,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.021319792398178095,\n \
\ \"inst_level_strict_acc,none\": 0.580335731414868,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.4805914972273567,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.021500357879025083,\n \"inst_level_loose_acc,none\"\
: 0.6211031175059952,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n \
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.14123867069486404,\n\
\ \"exact_match_stderr,none\": 0.009071964964291027,\n \"alias\":\
\ \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\": {\n\
\ \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.28013029315960913,\n \"exact_match_stderr,none\": 0.025671206118429726\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.06504065040650407,\n \"exact_match_stderr,none\": 0.022325895462591904\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\": \" - leaderboard_math_geometry_hard\"\
,\n \"exact_match,none\": 0.045454545454545456,\n \"exact_match_stderr,none\"\
: 0.018199158975632696\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.017857142857142856,\n \"exact_match_stderr,none\"\
: 0.007928503387888855\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.14285714285714285,\n \"exact_match_stderr,none\": 0.028289929799333556\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.27979274611398963,\n \"exact_match_stderr,none\"\
: 0.032396370467357\n },\n \"leaderboard_math_precalculus_hard\": {\n \
\ \"alias\": \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\"\
: 0.044444444444444446,\n \"exact_match_stderr,none\": 0.01780263602032457\n\
\ },\n \"leaderboard_mmlu_pro\": {\n \"alias\": \" - leaderboard_mmlu_pro\"\
,\n \"acc,none\": 0.39976728723404253,\n \"acc_stderr,none\": 0.004465937013437842\n\
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.4312169312169312,\n\
\ \"acc_norm_stderr,none\": 0.017847072044090258,\n \"alias\": \"\
\ - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\": {\n \
\ \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\"\
: 0.536,\n \"acc_norm_stderr,none\": 0.031603975145223735\n },\n \"\
leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.37890625,\n \"acc_norm_stderr,none\": 0.030379062946922643\n\
\ },\n \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.38,\n \"acc_norm_stderr,none\": 0.030760116042626098\n\
\ }\n}\n```"
repo_url: https://huggingface.co/SicariusSicariiStuff/dn_ep02
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_bbh_navigate
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_bbh_snarks
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_gpqa_extended
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_gpqa_main
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_ifeval
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_ifeval_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_mmlu_pro
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_musr_object_placements
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T00-30-02.747981.jsonl'
- config_name: SicariusSicariiStuff__dn_ep02__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_20T00_30_02.747981
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T00-30-02.747981.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T00-30-02.747981.jsonl'
---
# Dataset Card for Evaluation run of SicariusSicariiStuff/dn_ep02
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [SicariusSicariiStuff/dn_ep02](https://huggingface.co/SicariusSicariiStuff/dn_ep02).
The dataset is composed of 38 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can, for instance, do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/SicariusSicariiStuff__dn_ep02-details",
name="SicariusSicariiStuff__dn_ep02__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
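If you want to pull every per-task configuration rather than a single one, the sketch below enumerates the available configuration names and loads the `latest` split of each. It only relies on `get_dataset_config_names` and `load_dataset` from the `datasets` library; the `"leaderboard" in name` filter is an illustrative assumption that simply restricts the loop to the per-task detail configurations listed above.

```python
from datasets import get_dataset_config_names, load_dataset

repo_id = "open-llm-leaderboard/SicariusSicariiStuff__dn_ep02-details"

# List every configuration available in this details repository.
config_names = get_dataset_config_names(repo_id)

# Load the "latest" split of each per-task configuration
# (illustrative sketch; the name filter is an assumption).
details = {
    name: load_dataset(repo_id, name=name, split="latest")
    for name in config_names
    if "leaderboard" in name
}

for name, ds in details.items():
    print(f"{name}: {ds.num_rows} samples")
```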
## Latest results
These are the [latest results from run 2024-11-20T00-30-02.747981](https://huggingface.co/datasets/open-llm-leaderboard/SicariusSicariiStuff__dn_ep02-details/blob/main/SicariusSicariiStuff__dn_ep02/results_2024-11-20T00-30-02.747981.json) (note that there may be results for other tasks in the repository if successive evaluations didn't cover the same tasks; you can find each one in the aggregated results and in the "latest" split of each evaluation):
```python
{
"all": {
"leaderboard": {
"prompt_level_strict_acc,none": 0.43253234750462105,
"prompt_level_strict_acc_stderr,none": 0.021319792398178095,
"acc_norm,none": 0.4834608898689843,
"acc_norm_stderr,none": 0.005401676902860125,
"prompt_level_loose_acc,none": 0.4805914972273567,
"prompt_level_loose_acc_stderr,none": 0.021500357879025087,
"inst_level_strict_acc,none": 0.580335731414868,
"inst_level_strict_acc_stderr,none": "N/A",
"exact_match,none": 0.14123867069486404,
"exact_match_stderr,none": 0.009071964964291027,
"acc,none": 0.39976728723404253,
"acc_stderr,none": 0.004465937013437842,
"inst_level_loose_acc,none": 0.6211031175059952,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.5250824509633745,
"acc_norm_stderr,none": 0.006245185056833883,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.82,
"acc_norm_stderr,none": 0.02434689065029351
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5828877005347594,
"acc_norm_stderr,none": 0.0361545093114083
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.392,
"acc_norm_stderr,none": 0.030938207620401222
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.576,
"acc_norm_stderr,none": 0.03131803437491622
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.568,
"acc_norm_stderr,none": 0.03139181076542941
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.344,
"acc_norm_stderr,none": 0.03010450339231644
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.688,
"acc_norm_stderr,none": 0.029361067575219852
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.408,
"acc_norm_stderr,none": 0.031145209846548512
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.464,
"acc_norm_stderr,none": 0.03160397514522374
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.628,
"acc_norm_stderr,none": 0.03063032594455827
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.664,
"acc_norm_stderr,none": 0.029933259094191533
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.624,
"acc_norm_stderr,none": 0.03069633626739458
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.44,
"acc_norm_stderr,none": 0.03145724452223569
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.4589041095890411,
"acc_norm_stderr,none": 0.04138224905067309
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.564,
"acc_norm_stderr,none": 0.03142556706028136
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.696,
"acc_norm_stderr,none": 0.029150213374159652
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.524,
"acc_norm_stderr,none": 0.03164968895968774
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.6966292134831461,
"acc_norm_stderr,none": 0.03455421944400101
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.728,
"acc_norm_stderr,none": 0.028200088296309975
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.484,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.22,
"acc_norm_stderr,none": 0.026251792824605793
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.168,
"acc_norm_stderr,none": 0.023692813205492536
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.336,
"acc_norm_stderr,none": 0.02993325909419153
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.564,
"acc_norm_stderr,none": 0.03142556706028136
},
"leaderboard_gpqa": {
"acc_norm,none": 0.31543624161073824,
"acc_norm_stderr,none": 0.013462979686925527,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.3383838383838384,
"acc_norm_stderr,none": 0.033711241426263
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.29304029304029305,
"acc_norm_stderr,none": 0.019496773654296876
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.3325892857142857,
"acc_norm_stderr,none": 0.022284195136714192
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.43253234750462105,
"prompt_level_strict_acc_stderr,none": 0.021319792398178095,
"inst_level_strict_acc,none": 0.580335731414868,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.4805914972273567,
"prompt_level_loose_acc_stderr,none": 0.021500357879025083,
"inst_level_loose_acc,none": 0.6211031175059952,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.14123867069486404,
"exact_match_stderr,none": 0.009071964964291027,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.28013029315960913,
"exact_match_stderr,none": 0.025671206118429726
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.06504065040650407,
"exact_match_stderr,none": 0.022325895462591904
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.045454545454545456,
"exact_match_stderr,none": 0.018199158975632696
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.017857142857142856,
"exact_match_stderr,none": 0.007928503387888855
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.14285714285714285,
"exact_match_stderr,none": 0.028289929799333556
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.27979274611398963,
"exact_match_stderr,none": 0.032396370467357
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.044444444444444446,
"exact_match_stderr,none": 0.01780263602032457
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.39976728723404253,
"acc_stderr,none": 0.004465937013437842
},
"leaderboard_musr": {
"acc_norm,none": 0.4312169312169312,
"acc_norm_stderr,none": 0.017847072044090258,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.536,
"acc_norm_stderr,none": 0.031603975145223735
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.37890625,
"acc_norm_stderr,none": 0.030379062946922643
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.38,
"acc_norm_stderr,none": 0.030760116042626098
}
},
"leaderboard": {
"prompt_level_strict_acc,none": 0.43253234750462105,
"prompt_level_strict_acc_stderr,none": 0.021319792398178095,
"acc_norm,none": 0.4834608898689843,
"acc_norm_stderr,none": 0.005401676902860125,
"prompt_level_loose_acc,none": 0.4805914972273567,
"prompt_level_loose_acc_stderr,none": 0.021500357879025087,
"inst_level_strict_acc,none": 0.580335731414868,
"inst_level_strict_acc_stderr,none": "N/A",
"exact_match,none": 0.14123867069486404,
"exact_match_stderr,none": 0.009071964964291027,
"acc,none": 0.39976728723404253,
"acc_stderr,none": 0.004465937013437842,
"inst_level_loose_acc,none": 0.6211031175059952,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.5250824509633745,
"acc_norm_stderr,none": 0.006245185056833883,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.82,
"acc_norm_stderr,none": 0.02434689065029351
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5828877005347594,
"acc_norm_stderr,none": 0.0361545093114083
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.392,
"acc_norm_stderr,none": 0.030938207620401222
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.576,
"acc_norm_stderr,none": 0.03131803437491622
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.568,
"acc_norm_stderr,none": 0.03139181076542941
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.344,
"acc_norm_stderr,none": 0.03010450339231644
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.688,
"acc_norm_stderr,none": 0.029361067575219852
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.408,
"acc_norm_stderr,none": 0.031145209846548512
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.464,
"acc_norm_stderr,none": 0.03160397514522374
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.628,
"acc_norm_stderr,none": 0.03063032594455827
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.664,
"acc_norm_stderr,none": 0.029933259094191533
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.624,
"acc_norm_stderr,none": 0.03069633626739458
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.44,
"acc_norm_stderr,none": 0.03145724452223569
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.4589041095890411,
"acc_norm_stderr,none": 0.04138224905067309
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.564,
"acc_norm_stderr,none": 0.03142556706028136
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.696,
"acc_norm_stderr,none": 0.029150213374159652
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.524,
"acc_norm_stderr,none": 0.03164968895968774
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.6966292134831461,
"acc_norm_stderr,none": 0.03455421944400101
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.728,
"acc_norm_stderr,none": 0.028200088296309975
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.484,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.22,
"acc_norm_stderr,none": 0.026251792824605793
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.168,
"acc_norm_stderr,none": 0.023692813205492536
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.336,
"acc_norm_stderr,none": 0.02993325909419153
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.564,
"acc_norm_stderr,none": 0.03142556706028136
},
"leaderboard_gpqa": {
"acc_norm,none": 0.31543624161073824,
"acc_norm_stderr,none": 0.013462979686925527,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.3383838383838384,
"acc_norm_stderr,none": 0.033711241426263
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.29304029304029305,
"acc_norm_stderr,none": 0.019496773654296876
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.3325892857142857,
"acc_norm_stderr,none": 0.022284195136714192
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.43253234750462105,
"prompt_level_strict_acc_stderr,none": 0.021319792398178095,
"inst_level_strict_acc,none": 0.580335731414868,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.4805914972273567,
"prompt_level_loose_acc_stderr,none": 0.021500357879025083,
"inst_level_loose_acc,none": 0.6211031175059952,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.14123867069486404,
"exact_match_stderr,none": 0.009071964964291027,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.28013029315960913,
"exact_match_stderr,none": 0.025671206118429726
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.06504065040650407,
"exact_match_stderr,none": 0.022325895462591904
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.045454545454545456,
"exact_match_stderr,none": 0.018199158975632696
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.017857142857142856,
"exact_match_stderr,none": 0.007928503387888855
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.14285714285714285,
"exact_match_stderr,none": 0.028289929799333556
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.27979274611398963,
"exact_match_stderr,none": 0.032396370467357
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.044444444444444446,
"exact_match_stderr,none": 0.01780263602032457
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.39976728723404253,
"acc_stderr,none": 0.004465937013437842
},
"leaderboard_musr": {
"acc_norm,none": 0.4312169312169312,
"acc_norm_stderr,none": 0.017847072044090258,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.536,
"acc_norm_stderr,none": 0.031603975145223735
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.37890625,
"acc_norm_stderr,none": 0.030379062946922643
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.38,
"acc_norm_stderr,none": 0.030760116042626098
}
}
```
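As a rough guide to reading this structure, the following is a minimal sketch that assumes the results file linked above has been downloaded locally and parsed with the standard-library `json` module; it prints a few top-level leaderboard metrics and the normalized accuracy of every sub-task that reports one. The local file name is taken from this card and downloading it first is assumed.

```python
import json

# Assumes the results file referenced in this card has been downloaded locally.
with open("results_2024-11-20T00-30-02.747981.json") as f:
    results = json.load(f)

aggregated = results["all"]

# Top-level leaderboard metrics.
leaderboard = aggregated["leaderboard"]
print("IFEval strict (prompt level):", leaderboard["prompt_level_strict_acc,none"])
print("MATH hard exact match:", leaderboard["exact_match,none"])
print("MMLU-Pro accuracy:", leaderboard["acc,none"])

# Normalized accuracy for every sub-task that reports one.
for task, metrics in sorted(aggregated.items()):
    if task != "leaderboard" and "acc_norm,none" in metrics:
        print(f"{task}: {metrics['acc_norm,none']:.3f}")
```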
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases, and limitations of the dataset. More information is needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/OEvortex__HelpingAI2.5-10B-details | open-llm-leaderboard | "2024-11-20T00:34:02Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T00:30:08Z" | ---
pretty_name: Evaluation run of OEvortex/HelpingAI2.5-10B
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [OEvortex/HelpingAI2.5-10B](https://huggingface.co/OEvortex/HelpingAI2.5-10B)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/OEvortex__HelpingAI2.5-10B-details\"\
,\n\tname=\"OEvortex__HelpingAI2.5-10B__leaderboard_bbh_boolean_expressions\",\n\
\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-11-20T00-30-08.208806](https://huggingface.co/datasets/open-llm-leaderboard/OEvortex__HelpingAI2.5-10B-details/blob/main/OEvortex__HelpingAI2.5-10B/results_2024-11-20T00-30-08.208806.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"inst_level_strict_acc,none\": 0.3872901678657074,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0,\n \"prompt_level_loose_acc,none\"\
: 0.2975970425138632,\n \"prompt_level_loose_acc_stderr,none\": 0.019674812004413393,\n\
\ \"prompt_level_strict_acc,none\": 0.2680221811460259,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.019060638691630334,\n \"\
acc_norm,none\": 0.41198599040083017,\n \"acc_norm_stderr,none\": 0.0053485717605985415,\n\
\ \"inst_level_loose_acc,none\": 0.4136690647482014,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\",\n \"acc,none\": 0.25748005319148937,\n\
\ \"acc_stderr,none\": 0.0039863460274399175,\n \"alias\"\
: \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \"acc_norm,none\"\
: 0.44662385002603716,\n \"acc_norm_stderr,none\": 0.006256494886945098,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \
\ \"acc_norm,none\": 0.724,\n \"acc_norm_stderr,none\": 0.02832853727421142\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\"\
: \" - leaderboard_bbh_causal_judgement\",\n \"acc_norm,none\": 0.5614973262032086,\n\
\ \"acc_norm_stderr,none\": 0.03638341809400991\n },\n \
\ \"leaderboard_bbh_date_understanding\": {\n \"alias\": \" - leaderboard_bbh_date_understanding\"\
,\n \"acc_norm,none\": 0.492,\n \"acc_norm_stderr,none\":\
\ 0.03168215643141386\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\",\n \
\ \"acc_norm,none\": 0.436,\n \"acc_norm_stderr,none\": 0.031425567060281365\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\"\
: \" - leaderboard_bbh_formal_fallacies\",\n \"acc_norm,none\": 0.484,\n\
\ \"acc_norm_stderr,none\": 0.03166998503010743\n },\n \
\ \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.388,\n \"acc_norm_stderr,none\":\
\ 0.030881038748993974\n },\n \"leaderboard_bbh_hyperbaton\": {\n\
\ \"alias\": \" - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\"\
: 0.548,\n \"acc_norm_stderr,none\": 0.03153986449255664\n },\n\
\ \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.384,\n \"acc_norm_stderr,none\": 0.030821679117375447\n },\n\
\ \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\",\n \"\
acc_norm,none\": 0.276,\n \"acc_norm_stderr,none\": 0.02832853727421142\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n\
\ \"acc_norm,none\": 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"acc_norm,none\"\
: 0.508,\n \"acc_norm_stderr,none\": 0.03168215643141386\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.544,\n \"acc_norm_stderr,none\":\
\ 0.031563285061213475\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"alias\": \" - leaderboard_bbh_object_counting\",\n \
\ \"acc_norm,none\": 0.36,\n \"acc_norm_stderr,none\": 0.03041876402517494\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
alias\": \" - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\"\
: 0.5136986301369864,\n \"acc_norm_stderr,none\": 0.04150715331223415\n\
\ },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\",\n\
\ \"acc_norm,none\": 0.508,\n \"acc_norm_stderr,none\": 0.03168215643141386\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\": 0.692,\n \
\ \"acc_norm_stderr,none\": 0.02925692860650181\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\
,\n \"acc_norm,none\": 0.38,\n \"acc_norm_stderr,none\": 0.030760116042626098\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" -\
\ leaderboard_bbh_snarks\",\n \"acc_norm,none\": 0.4943820224719101,\n\
\ \"acc_norm_stderr,none\": 0.03757992900475984\n },\n \
\ \"leaderboard_bbh_sports_understanding\": {\n \"alias\": \" - leaderboard_bbh_sports_understanding\"\
,\n \"acc_norm,none\": 0.668,\n \"acc_norm_stderr,none\":\
\ 0.029844039047465857\n },\n \"leaderboard_bbh_temporal_sequences\"\
: {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\",\n \
\ \"acc_norm,none\": 0.196,\n \"acc_norm_stderr,none\": 0.025156857313255926\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.184,\n \"acc_norm_stderr,none\":\
\ 0.02455581299422255\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.144,\n \"acc_norm_stderr,none\":\
\ 0.022249407735450245\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.328,\n \"acc_norm_stderr,none\":\
\ 0.029752391824475363\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.26929530201342283,\n\
\ \"acc_norm_stderr,none\": 0.012861801928493162,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.25252525252525254,\n \"acc_norm_stderr,none\": 0.03095405547036587\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.2673992673992674,\n\
\ \"acc_norm_stderr,none\": 0.018959004502646776\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.27901785714285715,\n \"acc_norm_stderr,none\"\
: 0.021214094157265946\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.2680221811460259,\n \"prompt_level_strict_acc_stderr,none\": 0.019060638691630338,\n\
\ \"inst_level_strict_acc,none\": 0.3872901678657074,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.2975970425138632,\n \"prompt_level_loose_acc_stderr,none\": 0.019674812004413393,\n\
\ \"inst_level_loose_acc,none\": 0.4136690647482014,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0,\n \"alias\": \" - leaderboard_math_hard\"\n },\n \
\ \"leaderboard_math_algebra_hard\": {\n \"alias\": \" - leaderboard_math_algebra_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0\n },\n \"leaderboard_math_counting_and_prob_hard\": {\n \
\ \"alias\": \" - leaderboard_math_counting_and_prob_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n \
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\"\
: \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\": 0.0,\n\
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n\
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_prealgebra_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_precalculus_hard\": {\n \"alias\"\
: \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\":\
\ 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_mmlu_pro\"\
: {\n \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\"\
: 0.25748005319148937,\n \"acc_stderr,none\": 0.0039863460274399175\n\
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.373015873015873,\n\
\ \"acc_norm_stderr,none\": 0.01703947463285422,\n \"alias\"\
: \" - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\"\
: {\n \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \
\ \"acc_norm,none\": 0.548,\n \"acc_norm_stderr,none\": 0.03153986449255664\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\"\
: \" - leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.2734375,\n\
\ \"acc_norm_stderr,none\": 0.027912287939448926\n },\n \
\ \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.3,\n \"acc_norm_stderr,none\": 0.029040893477575783\n\
\ }\n },\n \"leaderboard\": {\n \"inst_level_strict_acc,none\"\
: 0.3872901678657074,\n \"inst_level_strict_acc_stderr,none\": \"N/A\",\n\
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0,\n\
\ \"prompt_level_loose_acc,none\": 0.2975970425138632,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.019674812004413393,\n \"prompt_level_strict_acc,none\": 0.2680221811460259,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.019060638691630334,\n \
\ \"acc_norm,none\": 0.41198599040083017,\n \"acc_norm_stderr,none\": 0.0053485717605985415,\n\
\ \"inst_level_loose_acc,none\": 0.4136690647482014,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"acc,none\": 0.25748005319148937,\n \"acc_stderr,none\"\
: 0.0039863460274399175,\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.44662385002603716,\n \"acc_norm_stderr,none\"\
: 0.006256494886945098,\n \"alias\": \" - leaderboard_bbh\"\n },\n \
\ \"leaderboard_bbh_boolean_expressions\": {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\"\
,\n \"acc_norm,none\": 0.724,\n \"acc_norm_stderr,none\": 0.02832853727421142\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.5614973262032086,\n \"acc_norm_stderr,none\"\
: 0.03638341809400991\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.492,\n \"acc_norm_stderr,none\": 0.03168215643141386\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.436,\n \"acc_norm_stderr,none\": 0.031425567060281365\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.484,\n \"acc_norm_stderr,none\": 0.03166998503010743\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.388,\n \"acc_norm_stderr,none\": 0.030881038748993974\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.548,\n \"acc_norm_stderr,none\": 0.03153986449255664\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.384,\n \"acc_norm_stderr,none\": 0.030821679117375447\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.276,\n \"acc_norm_stderr,none\": 0.02832853727421142\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.508,\n \"acc_norm_stderr,none\": 0.03168215643141386\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.544,\n \"acc_norm_stderr,none\": 0.031563285061213475\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.36,\n \"acc_norm_stderr,none\": 0.03041876402517494\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.5136986301369864,\n\
\ \"acc_norm_stderr,none\": 0.04150715331223415\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.508,\n \"acc_norm_stderr,none\": 0.03168215643141386\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.692,\n \"acc_norm_stderr,none\": 0.02925692860650181\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.38,\n \"acc_norm_stderr,none\": 0.030760116042626098\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.4943820224719101,\n \"acc_norm_stderr,none\"\
: 0.03757992900475984\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.668,\n \"acc_norm_stderr,none\": 0.029844039047465857\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.196,\n \"acc_norm_stderr,none\": 0.025156857313255926\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.184,\n \"acc_norm_stderr,none\": 0.02455581299422255\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.144,\n \"acc_norm_stderr,none\": 0.022249407735450245\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.328,\n \"acc_norm_stderr,none\": 0.029752391824475363\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.26929530201342283,\n\
\ \"acc_norm_stderr,none\": 0.012861801928493162,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.25252525252525254,\n\
\ \"acc_norm_stderr,none\": 0.03095405547036587\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.2673992673992674,\n \"acc_norm_stderr,none\": 0.018959004502646776\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.27901785714285715,\n \"acc_norm_stderr,none\"\
: 0.021214094157265946\n },\n \"leaderboard_ifeval\": {\n \"alias\"\
: \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.2680221811460259,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.019060638691630338,\n \
\ \"inst_level_strict_acc,none\": 0.3872901678657074,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.2975970425138632,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.019674812004413393,\n \"inst_level_loose_acc,none\"\
: 0.4136690647482014,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n \
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.0,\n \
\ \"exact_match_stderr,none\": 0.0,\n \"alias\": \" - leaderboard_math_hard\"\
\n },\n \"leaderboard_math_algebra_hard\": {\n \"alias\": \" - leaderboard_math_algebra_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_geometry_hard\"\
: {\n \"alias\": \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n \
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\": \" - leaderboard_math_num_theory_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_precalculus_hard\": {\n \"alias\": \" -\
\ leaderboard_math_precalculus_hard\",\n \"exact_match,none\": 0.0,\n \
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_mmlu_pro\": {\n\
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.25748005319148937,\n\
\ \"acc_stderr,none\": 0.0039863460274399175\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.373015873015873,\n \"acc_norm_stderr,none\"\
: 0.01703947463285422,\n \"alias\": \" - leaderboard_musr\"\n },\n \
\ \"leaderboard_musr_murder_mysteries\": {\n \"alias\": \" - leaderboard_musr_murder_mysteries\"\
,\n \"acc_norm,none\": 0.548,\n \"acc_norm_stderr,none\": 0.03153986449255664\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\": \" -\
\ leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.2734375,\n\
\ \"acc_norm_stderr,none\": 0.027912287939448926\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \"acc_norm,none\"\
: 0.3,\n \"acc_norm_stderr,none\": 0.029040893477575783\n }\n}\n```"
repo_url: https://huggingface.co/OEvortex/HelpingAI2.5-10B
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_bbh_navigate
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_bbh_snarks
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_gpqa_extended
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_gpqa_main
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_ifeval
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_ifeval_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_mmlu_pro
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_musr_object_placements
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T00-30-08.208806.jsonl'
- config_name: OEvortex__HelpingAI2.5-10B__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_20T00_30_08.208806
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T00-30-08.208806.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T00-30-08.208806.jsonl'
---
# Dataset Card for Evaluation run of OEvortex/HelpingAI2.5-10B
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [OEvortex/HelpingAI2.5-10B](https://huggingface.co/OEvortex/HelpingAI2.5-10B)
The dataset is composed of 38 configuration(s), each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can, for instance, do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/OEvortex__HelpingAI2.5-10B-details",
name="OEvortex__HelpingAI2.5-10B__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
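Since the dataset exposes one configuration per evaluated task (38 in total here), it can be convenient to enumerate the available configuration names programmatically before choosing one to load. The sketch below is not part of the original card; it only relies on the `get_dataset_config_names` helper from the `datasets` library, and the printed values are illustrative:
```python
from datasets import get_dataset_config_names

# List every per-task configuration exposed by this details repository,
# then pick one to pass as `name=` to load_dataset() as shown above.
configs = get_dataset_config_names(
    "open-llm-leaderboard/OEvortex__HelpingAI2.5-10B-details"
)
print(len(configs))  # expected to be on the order of the 38 task configs
print(configs[:3])   # e.g. some of the leaderboard_bbh_* configurations
```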
## Latest results
These are the [latest results from run 2024-11-20T00-30-08.208806](https://huggingface.co/datasets/open-llm-leaderboard/OEvortex__HelpingAI2.5-10B-details/blob/main/OEvortex__HelpingAI2.5-10B/results_2024-11-20T00-30-08.208806.json) (note that there might be results for other tasks in the repository if successive evals didn't cover the same tasks; you can find each one in its own results file and in the "latest" split of each eval):
```python
{
"all": {
"leaderboard": {
"inst_level_strict_acc,none": 0.3872901678657074,
"inst_level_strict_acc_stderr,none": "N/A",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"prompt_level_loose_acc,none": 0.2975970425138632,
"prompt_level_loose_acc_stderr,none": 0.019674812004413393,
"prompt_level_strict_acc,none": 0.2680221811460259,
"prompt_level_strict_acc_stderr,none": 0.019060638691630334,
"acc_norm,none": 0.41198599040083017,
"acc_norm_stderr,none": 0.0053485717605985415,
"inst_level_loose_acc,none": 0.4136690647482014,
"inst_level_loose_acc_stderr,none": "N/A",
"acc,none": 0.25748005319148937,
"acc_stderr,none": 0.0039863460274399175,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.44662385002603716,
"acc_norm_stderr,none": 0.006256494886945098,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.724,
"acc_norm_stderr,none": 0.02832853727421142
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5614973262032086,
"acc_norm_stderr,none": 0.03638341809400991
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.492,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.436,
"acc_norm_stderr,none": 0.031425567060281365
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.484,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.388,
"acc_norm_stderr,none": 0.030881038748993974
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.548,
"acc_norm_stderr,none": 0.03153986449255664
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.384,
"acc_norm_stderr,none": 0.030821679117375447
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.276,
"acc_norm_stderr,none": 0.02832853727421142
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.36,
"acc_norm_stderr,none": 0.03041876402517494
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.5136986301369864,
"acc_norm_stderr,none": 0.04150715331223415
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.692,
"acc_norm_stderr,none": 0.02925692860650181
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.38,
"acc_norm_stderr,none": 0.030760116042626098
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.4943820224719101,
"acc_norm_stderr,none": 0.03757992900475984
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.668,
"acc_norm_stderr,none": 0.029844039047465857
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.196,
"acc_norm_stderr,none": 0.025156857313255926
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.184,
"acc_norm_stderr,none": 0.02455581299422255
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.144,
"acc_norm_stderr,none": 0.022249407735450245
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.328,
"acc_norm_stderr,none": 0.029752391824475363
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.26929530201342283,
"acc_norm_stderr,none": 0.012861801928493162,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.25252525252525254,
"acc_norm_stderr,none": 0.03095405547036587
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.2673992673992674,
"acc_norm_stderr,none": 0.018959004502646776
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.27901785714285715,
"acc_norm_stderr,none": 0.021214094157265946
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.2680221811460259,
"prompt_level_strict_acc_stderr,none": 0.019060638691630338,
"inst_level_strict_acc,none": 0.3872901678657074,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.2975970425138632,
"prompt_level_loose_acc_stderr,none": 0.019674812004413393,
"inst_level_loose_acc,none": 0.4136690647482014,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.25748005319148937,
"acc_stderr,none": 0.0039863460274399175
},
"leaderboard_musr": {
"acc_norm,none": 0.373015873015873,
"acc_norm_stderr,none": 0.01703947463285422,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.548,
"acc_norm_stderr,none": 0.03153986449255664
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.2734375,
"acc_norm_stderr,none": 0.027912287939448926
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.3,
"acc_norm_stderr,none": 0.029040893477575783
}
},
"leaderboard": {
"inst_level_strict_acc,none": 0.3872901678657074,
"inst_level_strict_acc_stderr,none": "N/A",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"prompt_level_loose_acc,none": 0.2975970425138632,
"prompt_level_loose_acc_stderr,none": 0.019674812004413393,
"prompt_level_strict_acc,none": 0.2680221811460259,
"prompt_level_strict_acc_stderr,none": 0.019060638691630334,
"acc_norm,none": 0.41198599040083017,
"acc_norm_stderr,none": 0.0053485717605985415,
"inst_level_loose_acc,none": 0.4136690647482014,
"inst_level_loose_acc_stderr,none": "N/A",
"acc,none": 0.25748005319148937,
"acc_stderr,none": 0.0039863460274399175,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.44662385002603716,
"acc_norm_stderr,none": 0.006256494886945098,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.724,
"acc_norm_stderr,none": 0.02832853727421142
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5614973262032086,
"acc_norm_stderr,none": 0.03638341809400991
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.492,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.436,
"acc_norm_stderr,none": 0.031425567060281365
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.484,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.388,
"acc_norm_stderr,none": 0.030881038748993974
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.548,
"acc_norm_stderr,none": 0.03153986449255664
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.384,
"acc_norm_stderr,none": 0.030821679117375447
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.276,
"acc_norm_stderr,none": 0.02832853727421142
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.36,
"acc_norm_stderr,none": 0.03041876402517494
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.5136986301369864,
"acc_norm_stderr,none": 0.04150715331223415
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.692,
"acc_norm_stderr,none": 0.02925692860650181
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.38,
"acc_norm_stderr,none": 0.030760116042626098
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.4943820224719101,
"acc_norm_stderr,none": 0.03757992900475984
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.668,
"acc_norm_stderr,none": 0.029844039047465857
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.196,
"acc_norm_stderr,none": 0.025156857313255926
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.184,
"acc_norm_stderr,none": 0.02455581299422255
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.144,
"acc_norm_stderr,none": 0.022249407735450245
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.328,
"acc_norm_stderr,none": 0.029752391824475363
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.26929530201342283,
"acc_norm_stderr,none": 0.012861801928493162,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.25252525252525254,
"acc_norm_stderr,none": 0.03095405547036587
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.2673992673992674,
"acc_norm_stderr,none": 0.018959004502646776
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.27901785714285715,
"acc_norm_stderr,none": 0.021214094157265946
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.2680221811460259,
"prompt_level_strict_acc_stderr,none": 0.019060638691630338,
"inst_level_strict_acc,none": 0.3872901678657074,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.2975970425138632,
"prompt_level_loose_acc_stderr,none": 0.019674812004413393,
"inst_level_loose_acc,none": 0.4136690647482014,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.25748005319148937,
"acc_stderr,none": 0.0039863460274399175
},
"leaderboard_musr": {
"acc_norm,none": 0.373015873015873,
"acc_norm_stderr,none": 0.01703947463285422,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.548,
"acc_norm_stderr,none": 0.03153986449255664
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.2734375,
"acc_norm_stderr,none": 0.027912287939448926
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.3,
"acc_norm_stderr,none": 0.029040893477575783
}
}
```
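For a quick side-by-side comparison of the sub-task scores above, the per-task `acc_norm,none` values can be collected into a small table. The following sketch is not part of the original card and assumes `pandas` is installed; the `results` dict below is an abbreviated, hand-copied excerpt of the JSON shown above, standing in for the fully parsed file:
```python
import pandas as pd

# Abbreviated excerpt of the per-task results shown above (values copied
# from the JSON); in practice this dict would come from the parsed file.
results = {
    "leaderboard_bbh_boolean_expressions": {"acc_norm,none": 0.724},
    "leaderboard_bbh_ruin_names": {"acc_norm,none": 0.692},
    "leaderboard_bbh_temporal_sequences": {"acc_norm,none": 0.196},
    "leaderboard_gpqa_main": {"acc_norm,none": 0.27901785714285715},
    "leaderboard_musr_murder_mysteries": {"acc_norm,none": 0.548},
}

# Flatten into rows and rank sub-tasks by normalized accuracy.
rows = [
    {"task": task, "acc_norm": metrics["acc_norm,none"]}
    for task, metrics in results.items()
    if "acc_norm,none" in metrics
]
df = pd.DataFrame(rows).sort_values("acc_norm", ascending=False)
print(df.to_string(index=False))
```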
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases, and limitations of the dataset. More information is needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
JJuny/llama2_SYC_1120_with_testPDF_train | JJuny | "2024-11-20T00:34:39Z" | 9 | 0 | [
"region:us"
] | null | "2024-11-20T00:33:55Z" | ---
dataset_info:
features:
- name: text
dtype: string
splits:
- name: train
num_bytes: 533450
num_examples: 71
download_size: 145605
dataset_size: 533450
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
magnifi/parser_user_v27e | magnifi | "2024-11-20T00:46:01Z" | 9 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T00:46:00Z" | ---
dataset_info:
features:
- name: Query_id
dtype: int64
- name: Query
dtype: string
- name: Elastic_search
dtype: string
- name: List_of_portfolios
dtype: string
splits:
- name: train
num_bytes: 124203
num_examples: 1058
- name: validation
num_bytes: 13674
num_examples: 126
download_size: 59242
dataset_size: 137877
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
---
|
Xtest/function_dataset_with_ast_image_final | Xtest | "2024-11-20T00:55:23Z" | 9 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T00:54:08Z" | ---
dataset_info:
features:
- name: function_all
dtype: string
- name: function_name
dtype: string
- name: function_body
dtype: string
- name: function_name_with_paramaters
dtype: string
- name: function_body_only
dtype: string
- name: Modified Code
dtype: string
- name: S-Expression of Original Code
dtype: string
- name: S-Expression of Modified Code
dtype: string
- name: AST Image Original
dtype: string
- name: AST Image Modified
dtype: string
splits:
- name: train
num_bytes: 66117971
num_examples: 10
- name: test
num_bytes: 32696343
num_examples: 10
download_size: 95975065
dataset_size: 98814314
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
---
|
open-llm-leaderboard/Aurel9__testmerge-7b-details | open-llm-leaderboard | "2024-11-20T01:02:08Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T00:59:08Z" | ---
pretty_name: Evaluation run of Aurel9/testmerge-7b
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [Aurel9/testmerge-7b](https://huggingface.co/Aurel9/testmerge-7b)\nThe dataset\
\ is composed of 38 configuration(s), each one corresponding to one of the evaluated\
\ task.\n\nThe dataset has been created from 1 run(s). Each run can be found as\
\ a specific split in each configuration, the split being named using the timestamp\
\ of the run.The \"train\" split is always pointing to the latest results.\n\nAn\
\ additional configuration \"results\" store all the aggregated results of the run.\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/Aurel9__testmerge-7b-details\"\
,\n\tname=\"Aurel9__testmerge-7b__leaderboard_bbh_boolean_expressions\",\n\tsplit=\"\
latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results from run\
\ 2024-11-20T00-59-08.345134](https://huggingface.co/datasets/open-llm-leaderboard/Aurel9__testmerge-7b-details/blob/main/Aurel9__testmerge-7b/results_2024-11-20T00-59-08.345134.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"acc_norm,none\": 0.47710468283824103,\n \"acc_norm_stderr,none\"\
: 0.005294530877237734,\n \"prompt_level_loose_acc,none\": 0.34750462107208874,\n\
\ \"prompt_level_loose_acc_stderr,none\": 0.020491423653415698,\n \
\ \"inst_level_loose_acc,none\": 0.4844124700239808,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_strict_acc,none\": 0.32717190388170053,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.020190318966906255,\n \
\ \"exact_match,none\": 0.06722054380664652,\n \"exact_match_stderr,none\"\
: 0.006813337719672289,\n \"acc,none\": 0.3052692819148936,\n \
\ \"acc_stderr,none\": 0.004198546235789469,\n \"inst_level_strict_acc,none\"\
: 0.46882494004796166,\n \"inst_level_strict_acc_stderr,none\": \"N/A\"\
,\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.5151883353584447,\n \"acc_norm_stderr,none\"\
: 0.006085095576474446,\n \"alias\": \" - leaderboard_bbh\"\n \
\ },\n \"leaderboard_bbh_boolean_expressions\": {\n \"alias\"\
: \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\": 0.772,\n\
\ \"acc_norm_stderr,none\": 0.026587432487268498\n },\n \
\ \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.6149732620320856,\n \"acc_norm_stderr,none\"\
: 0.03567936280544673\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_date_understanding\",\n \
\ \"acc_norm,none\": 0.464,\n \"acc_norm_stderr,none\": 0.03160397514522374\n\
\ },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\": 0.692,\n\
\ \"acc_norm_stderr,none\": 0.02925692860650181\n },\n \
\ \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.552,\n \"acc_norm_stderr,none\":\
\ 0.03151438761115348\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\",\n \
\ \"acc_norm,none\": 0.324,\n \"acc_norm_stderr,none\": 0.029658294924545567\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \"\
\ - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\": 0.732,\n \
\ \"acc_norm_stderr,none\": 0.02806876238252672\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
,\n \"acc_norm,none\": 0.496,\n \"acc_norm_stderr,none\":\
\ 0.0316851985511992\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.46,\n \"acc_norm_stderr,none\": 0.031584653891499004\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n\
\ \"acc_norm,none\": 0.728,\n \"acc_norm_stderr,none\": 0.028200088296309975\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"acc_norm,none\"\
: 0.748,\n \"acc_norm_stderr,none\": 0.027513851933031318\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.612,\n \"acc_norm_stderr,none\":\
\ 0.030881038748993974\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"alias\": \" - leaderboard_bbh_object_counting\",\n \
\ \"acc_norm,none\": 0.332,\n \"acc_norm_stderr,none\": 0.029844039047465857\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
alias\": \" - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\"\
: 0.5,\n \"acc_norm_stderr,none\": 0.041522739926869986\n },\n\
\ \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \"\
alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\",\n \"\
acc_norm,none\": 0.48,\n \"acc_norm_stderr,none\": 0.03166085340849512\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\": 0.572,\n \
\ \"acc_norm_stderr,none\": 0.031355968923772626\n },\n \"\
leaderboard_bbh_salient_translation_error_detection\": {\n \"alias\"\
: \" - leaderboard_bbh_salient_translation_error_detection\",\n \"acc_norm,none\"\
: 0.432,\n \"acc_norm_stderr,none\": 0.03139181076542942\n },\n\
\ \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.7640449438202247,\n \"acc_norm_stderr,none\"\
: 0.03191445731253357\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \
\ \"acc_norm,none\": 0.828,\n \"acc_norm_stderr,none\": 0.02391551394448624\n\
\ },\n \"leaderboard_bbh_temporal_sequences\": {\n \"alias\"\
: \" - leaderboard_bbh_temporal_sequences\",\n \"acc_norm,none\": 0.212,\n\
\ \"acc_norm_stderr,none\": 0.025901884690541117\n },\n \
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \"\
alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\",\n \
\ \"acc_norm,none\": 0.188,\n \"acc_norm_stderr,none\": 0.024760377727750513\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.156,\n \"acc_norm_stderr,none\":\
\ 0.022995023034068682\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.316,\n \"acc_norm_stderr,none\":\
\ 0.029462657598578648\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.48,\n \"acc_norm_stderr,none\": 0.03166085340849512\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.30033557046979864,\n\
\ \"acc_norm_stderr,none\": 0.013290502708254225,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.32323232323232326,\n \"acc_norm_stderr,none\": 0.03332299921070644\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.29853479853479853,\n\
\ \"acc_norm_stderr,none\": 0.01960208623041336\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.2924107142857143,\n \"acc_norm_stderr,none\"\
: 0.02151461125992856\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.32717190388170053,\n \"prompt_level_strict_acc_stderr,none\": 0.020190318966906255,\n\
\ \"inst_level_strict_acc,none\": 0.46882494004796166,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.34750462107208874,\n \"prompt_level_loose_acc_stderr,none\": 0.020491423653415698,\n\
\ \"inst_level_loose_acc,none\": 0.4844124700239808,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.06722054380664652,\n \"exact_match_stderr,none\"\
: 0.006813337719672289,\n \"alias\": \" - leaderboard_math_hard\"\n \
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.11074918566775244,\n\
\ \"exact_match_stderr,none\": 0.017939969209400983\n },\n \
\ \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\": \"\
\ - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.07317073170731707,\n \"exact_match_stderr,none\": 0.023577005978097667\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\"\
: \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\": 0.022727272727272728,\n\
\ \"exact_match_stderr,none\": 0.0130210469090637\n },\n \
\ \"leaderboard_math_intermediate_algebra_hard\": {\n \"alias\": \"\
\ - leaderboard_math_intermediate_algebra_hard\",\n \"exact_match,none\"\
: 0.02857142857142857,\n \"exact_match_stderr,none\": 0.009973998820736053\n\
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\"\
: \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\": 0.05844155844155844,\n\
\ \"exact_match_stderr,none\": 0.018964387451957845\n },\n \
\ \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.11917098445595854,\n \"exact_match_stderr,none\"\
: 0.02338193534812143\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.022222222222222223,\n \"exact_match_stderr,none\"\
: 0.01273389971505968\n },\n \"leaderboard_mmlu_pro\": {\n \
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.3052692819148936,\n\
\ \"acc_stderr,none\": 0.004198546235789468\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.4656084656084656,\n \"acc_norm_stderr,none\"\
: 0.018039786017608982,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"alias\":\
\ \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\": 0.552,\n\
\ \"acc_norm_stderr,none\": 0.03151438761115348\n },\n \
\ \"leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.43359375,\n \"acc_norm_stderr,none\"\
: 0.031033834158735715\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \
\ \"acc_norm,none\": 0.412,\n \"acc_norm_stderr,none\": 0.03119159602602282\n\
\ }\n },\n \"leaderboard\": {\n \"acc_norm,none\": 0.47710468283824103,\n\
\ \"acc_norm_stderr,none\": 0.005294530877237734,\n \"prompt_level_loose_acc,none\"\
: 0.34750462107208874,\n \"prompt_level_loose_acc_stderr,none\": 0.020491423653415698,\n\
\ \"inst_level_loose_acc,none\": 0.4844124700239808,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_strict_acc,none\": 0.32717190388170053,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.020190318966906255,\n \"\
exact_match,none\": 0.06722054380664652,\n \"exact_match_stderr,none\": 0.006813337719672289,\n\
\ \"acc,none\": 0.3052692819148936,\n \"acc_stderr,none\": 0.004198546235789469,\n\
\ \"inst_level_strict_acc,none\": 0.46882494004796166,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.5151883353584447,\n \"acc_norm_stderr,none\"\
: 0.006085095576474446,\n \"alias\": \" - leaderboard_bbh\"\n },\n \
\ \"leaderboard_bbh_boolean_expressions\": {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\"\
,\n \"acc_norm,none\": 0.772,\n \"acc_norm_stderr,none\": 0.026587432487268498\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.6149732620320856,\n \"acc_norm_stderr,none\"\
: 0.03567936280544673\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.464,\n \"acc_norm_stderr,none\": 0.03160397514522374\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.692,\n \"acc_norm_stderr,none\": 0.02925692860650181\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.552,\n \"acc_norm_stderr,none\": 0.03151438761115348\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.324,\n \"acc_norm_stderr,none\": 0.029658294924545567\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.732,\n \"acc_norm_stderr,none\": 0.02806876238252672\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.496,\n \"acc_norm_stderr,none\": 0.0316851985511992\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.46,\n \"acc_norm_stderr,none\": 0.031584653891499004\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.728,\n \"acc_norm_stderr,none\": 0.028200088296309975\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.748,\n \"acc_norm_stderr,none\": 0.027513851933031318\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.612,\n \"acc_norm_stderr,none\": 0.030881038748993974\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.332,\n \"acc_norm_stderr,none\": 0.029844039047465857\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.5,\n \
\ \"acc_norm_stderr,none\": 0.041522739926869986\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.48,\n \"acc_norm_stderr,none\": 0.03166085340849512\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.572,\n \"acc_norm_stderr,none\": 0.031355968923772626\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.432,\n \"acc_norm_stderr,none\": 0.03139181076542942\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.7640449438202247,\n \"acc_norm_stderr,none\"\
: 0.03191445731253357\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.828,\n \"acc_norm_stderr,none\": 0.02391551394448624\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.212,\n \"acc_norm_stderr,none\": 0.025901884690541117\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.188,\n \"acc_norm_stderr,none\": 0.024760377727750513\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.156,\n \"acc_norm_stderr,none\": 0.022995023034068682\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.316,\n \"acc_norm_stderr,none\": 0.029462657598578648\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.48,\n \"acc_norm_stderr,none\": 0.03166085340849512\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.30033557046979864,\n\
\ \"acc_norm_stderr,none\": 0.013290502708254225,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.32323232323232326,\n\
\ \"acc_norm_stderr,none\": 0.03332299921070644\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.29853479853479853,\n \"acc_norm_stderr,none\": 0.01960208623041336\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.2924107142857143,\n \"acc_norm_stderr,none\"\
: 0.02151461125992856\n },\n \"leaderboard_ifeval\": {\n \"alias\"\
: \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.32717190388170053,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.020190318966906255,\n \
\ \"inst_level_strict_acc,none\": 0.46882494004796166,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.34750462107208874,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.020491423653415698,\n \"inst_level_loose_acc,none\"\
: 0.4844124700239808,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n \
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.06722054380664652,\n\
\ \"exact_match_stderr,none\": 0.006813337719672289,\n \"alias\":\
\ \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\": {\n\
\ \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.11074918566775244,\n \"exact_match_stderr,none\": 0.017939969209400983\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.07317073170731707,\n \"exact_match_stderr,none\": 0.023577005978097667\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\": \" - leaderboard_math_geometry_hard\"\
,\n \"exact_match,none\": 0.022727272727272728,\n \"exact_match_stderr,none\"\
: 0.0130210469090637\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.02857142857142857,\n \"exact_match_stderr,none\"\
: 0.009973998820736053\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.05844155844155844,\n \"exact_match_stderr,none\": 0.018964387451957845\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.11917098445595854,\n \"exact_match_stderr,none\"\
: 0.02338193534812143\n },\n \"leaderboard_math_precalculus_hard\": {\n \
\ \"alias\": \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\"\
: 0.022222222222222223,\n \"exact_match_stderr,none\": 0.01273389971505968\n\
\ },\n \"leaderboard_mmlu_pro\": {\n \"alias\": \" - leaderboard_mmlu_pro\"\
,\n \"acc,none\": 0.3052692819148936,\n \"acc_stderr,none\": 0.004198546235789468\n\
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.4656084656084656,\n\
\ \"acc_norm_stderr,none\": 0.018039786017608982,\n \"alias\": \"\
\ - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\": {\n \
\ \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\"\
: 0.552,\n \"acc_norm_stderr,none\": 0.03151438761115348\n },\n \"\
leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.43359375,\n \"acc_norm_stderr,none\": 0.031033834158735715\n\
\ },\n \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.412,\n \"acc_norm_stderr,none\": 0.03119159602602282\n\
\ }\n}\n```"
repo_url: https://huggingface.co/Aurel9/testmerge-7b
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: Aurel9__testmerge-7b__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_bbh_navigate
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_bbh_snarks
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_gpqa_extended
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_gpqa_main
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_ifeval
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_ifeval_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_mmlu_pro
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_musr_object_placements
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T00-59-08.345134.jsonl'
- config_name: Aurel9__testmerge-7b__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_20T00_59_08.345134
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T00-59-08.345134.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T00-59-08.345134.jsonl'
---
# Dataset Card for Evaluation run of Aurel9/testmerge-7b
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [Aurel9/testmerge-7b](https://huggingface.co/Aurel9/testmerge-7b)
The dataset is composed of 38 configuration(s), each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can, for instance, do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/Aurel9__testmerge-7b-details",
name="Aurel9__testmerge-7b__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
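Once loaded, the split behaves like a regular `datasets.Dataset`, so per-sample records can be inspected or exported directly. The snippet below is a minimal sketch (it assumes `pandas` is installed for the DataFrame conversion; the exact column names depend on the task):
```python
# Inspect the first evaluated sample of the selected task
print(data[0])

# Convert the whole split to a pandas DataFrame for filtering/aggregation
df = data.to_pandas()
print(df.columns.tolist())
print(len(df), "samples in the latest run")
```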
## Latest results
These are the [latest results from run 2024-11-20T00-59-08.345134](https://huggingface.co/datasets/open-llm-leaderboard/Aurel9__testmerge-7b-details/blob/main/Aurel9__testmerge-7b/results_2024-11-20T00-59-08.345134.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in its results file and in the "latest" split of each eval):
```python
{
"all": {
"leaderboard": {
"acc_norm,none": 0.47710468283824103,
"acc_norm_stderr,none": 0.005294530877237734,
"prompt_level_loose_acc,none": 0.34750462107208874,
"prompt_level_loose_acc_stderr,none": 0.020491423653415698,
"inst_level_loose_acc,none": 0.4844124700239808,
"inst_level_loose_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.32717190388170053,
"prompt_level_strict_acc_stderr,none": 0.020190318966906255,
"exact_match,none": 0.06722054380664652,
"exact_match_stderr,none": 0.006813337719672289,
"acc,none": 0.3052692819148936,
"acc_stderr,none": 0.004198546235789469,
"inst_level_strict_acc,none": 0.46882494004796166,
"inst_level_strict_acc_stderr,none": "N/A",
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.5151883353584447,
"acc_norm_stderr,none": 0.006085095576474446,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.772,
"acc_norm_stderr,none": 0.026587432487268498
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.6149732620320856,
"acc_norm_stderr,none": 0.03567936280544673
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.464,
"acc_norm_stderr,none": 0.03160397514522374
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.692,
"acc_norm_stderr,none": 0.02925692860650181
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.03151438761115348
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.324,
"acc_norm_stderr,none": 0.029658294924545567
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.732,
"acc_norm_stderr,none": 0.02806876238252672
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.496,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.46,
"acc_norm_stderr,none": 0.031584653891499004
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.728,
"acc_norm_stderr,none": 0.028200088296309975
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.748,
"acc_norm_stderr,none": 0.027513851933031318
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.612,
"acc_norm_stderr,none": 0.030881038748993974
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.332,
"acc_norm_stderr,none": 0.029844039047465857
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.5,
"acc_norm_stderr,none": 0.041522739926869986
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.48,
"acc_norm_stderr,none": 0.03166085340849512
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.572,
"acc_norm_stderr,none": 0.031355968923772626
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.432,
"acc_norm_stderr,none": 0.03139181076542942
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.7640449438202247,
"acc_norm_stderr,none": 0.03191445731253357
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.828,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.212,
"acc_norm_stderr,none": 0.025901884690541117
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.188,
"acc_norm_stderr,none": 0.024760377727750513
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.156,
"acc_norm_stderr,none": 0.022995023034068682
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.029462657598578648
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.48,
"acc_norm_stderr,none": 0.03166085340849512
},
"leaderboard_gpqa": {
"acc_norm,none": 0.30033557046979864,
"acc_norm_stderr,none": 0.013290502708254225,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.32323232323232326,
"acc_norm_stderr,none": 0.03332299921070644
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.29853479853479853,
"acc_norm_stderr,none": 0.01960208623041336
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.2924107142857143,
"acc_norm_stderr,none": 0.02151461125992856
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.32717190388170053,
"prompt_level_strict_acc_stderr,none": 0.020190318966906255,
"inst_level_strict_acc,none": 0.46882494004796166,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.34750462107208874,
"prompt_level_loose_acc_stderr,none": 0.020491423653415698,
"inst_level_loose_acc,none": 0.4844124700239808,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.06722054380664652,
"exact_match_stderr,none": 0.006813337719672289,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.11074918566775244,
"exact_match_stderr,none": 0.017939969209400983
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.07317073170731707,
"exact_match_stderr,none": 0.023577005978097667
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.022727272727272728,
"exact_match_stderr,none": 0.0130210469090637
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.02857142857142857,
"exact_match_stderr,none": 0.009973998820736053
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.05844155844155844,
"exact_match_stderr,none": 0.018964387451957845
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.11917098445595854,
"exact_match_stderr,none": 0.02338193534812143
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.022222222222222223,
"exact_match_stderr,none": 0.01273389971505968
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.3052692819148936,
"acc_stderr,none": 0.004198546235789468
},
"leaderboard_musr": {
"acc_norm,none": 0.4656084656084656,
"acc_norm_stderr,none": 0.018039786017608982,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.03151438761115348
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.43359375,
"acc_norm_stderr,none": 0.031033834158735715
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.412,
"acc_norm_stderr,none": 0.03119159602602282
}
},
"leaderboard": {
"acc_norm,none": 0.47710468283824103,
"acc_norm_stderr,none": 0.005294530877237734,
"prompt_level_loose_acc,none": 0.34750462107208874,
"prompt_level_loose_acc_stderr,none": 0.020491423653415698,
"inst_level_loose_acc,none": 0.4844124700239808,
"inst_level_loose_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.32717190388170053,
"prompt_level_strict_acc_stderr,none": 0.020190318966906255,
"exact_match,none": 0.06722054380664652,
"exact_match_stderr,none": 0.006813337719672289,
"acc,none": 0.3052692819148936,
"acc_stderr,none": 0.004198546235789469,
"inst_level_strict_acc,none": 0.46882494004796166,
"inst_level_strict_acc_stderr,none": "N/A",
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.5151883353584447,
"acc_norm_stderr,none": 0.006085095576474446,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.772,
"acc_norm_stderr,none": 0.026587432487268498
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.6149732620320856,
"acc_norm_stderr,none": 0.03567936280544673
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.464,
"acc_norm_stderr,none": 0.03160397514522374
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.692,
"acc_norm_stderr,none": 0.02925692860650181
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.03151438761115348
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.324,
"acc_norm_stderr,none": 0.029658294924545567
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.732,
"acc_norm_stderr,none": 0.02806876238252672
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.496,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.46,
"acc_norm_stderr,none": 0.031584653891499004
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.728,
"acc_norm_stderr,none": 0.028200088296309975
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.748,
"acc_norm_stderr,none": 0.027513851933031318
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.612,
"acc_norm_stderr,none": 0.030881038748993974
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.332,
"acc_norm_stderr,none": 0.029844039047465857
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.5,
"acc_norm_stderr,none": 0.041522739926869986
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.48,
"acc_norm_stderr,none": 0.03166085340849512
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.572,
"acc_norm_stderr,none": 0.031355968923772626
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.432,
"acc_norm_stderr,none": 0.03139181076542942
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.7640449438202247,
"acc_norm_stderr,none": 0.03191445731253357
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.828,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.212,
"acc_norm_stderr,none": 0.025901884690541117
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.188,
"acc_norm_stderr,none": 0.024760377727750513
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.156,
"acc_norm_stderr,none": 0.022995023034068682
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.029462657598578648
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.48,
"acc_norm_stderr,none": 0.03166085340849512
},
"leaderboard_gpqa": {
"acc_norm,none": 0.30033557046979864,
"acc_norm_stderr,none": 0.013290502708254225,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.32323232323232326,
"acc_norm_stderr,none": 0.03332299921070644
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.29853479853479853,
"acc_norm_stderr,none": 0.01960208623041336
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.2924107142857143,
"acc_norm_stderr,none": 0.02151461125992856
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.32717190388170053,
"prompt_level_strict_acc_stderr,none": 0.020190318966906255,
"inst_level_strict_acc,none": 0.46882494004796166,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.34750462107208874,
"prompt_level_loose_acc_stderr,none": 0.020491423653415698,
"inst_level_loose_acc,none": 0.4844124700239808,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.06722054380664652,
"exact_match_stderr,none": 0.006813337719672289,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.11074918566775244,
"exact_match_stderr,none": 0.017939969209400983
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.07317073170731707,
"exact_match_stderr,none": 0.023577005978097667
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.022727272727272728,
"exact_match_stderr,none": 0.0130210469090637
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.02857142857142857,
"exact_match_stderr,none": 0.009973998820736053
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.05844155844155844,
"exact_match_stderr,none": 0.018964387451957845
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.11917098445595854,
"exact_match_stderr,none": 0.02338193534812143
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.022222222222222223,
"exact_match_stderr,none": 0.01273389971505968
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.3052692819148936,
"acc_stderr,none": 0.004198546235789468
},
"leaderboard_musr": {
"acc_norm,none": 0.4656084656084656,
"acc_norm_stderr,none": 0.018039786017608982,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.03151438761115348
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.43359375,
"acc_norm_stderr,none": 0.031033834158735715
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.412,
"acc_norm_stderr,none": 0.03119159602602282
}
}
```
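If you prefer to work with the aggregated numbers directly rather than through the per-task splits, the results file linked above can also be downloaded as plain JSON. This is a minimal sketch using `huggingface_hub` (the repository ID and file path are taken from the link above; adjust them for other runs, and note that the file may contain additional metadata beyond the excerpt shown here):
```python
import json

from huggingface_hub import hf_hub_download

# Download the aggregated results JSON for the 2024-11-20T00-59-08.345134 run
results_path = hf_hub_download(
    repo_id="open-llm-leaderboard/Aurel9__testmerge-7b-details",
    filename="Aurel9__testmerge-7b/results_2024-11-20T00-59-08.345134.json",
    repo_type="dataset",
)

with open(results_path) as f:
    results = json.load(f)

# The top-level keys mirror the structure of the excerpt above
print(sorted(results.keys()))
```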
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/gmonsoon__SahabatAI-MediChatIndo-8B-v1-details | open-llm-leaderboard | "2024-11-20T01:04:49Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T01:00:52Z" | ---
pretty_name: Evaluation run of gmonsoon/SahabatAI-MediChatIndo-8B-v1
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [gmonsoon/SahabatAI-MediChatIndo-8B-v1](https://huggingface.co/gmonsoon/SahabatAI-MediChatIndo-8B-v1)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/gmonsoon__SahabatAI-MediChatIndo-8B-v1-details\"\
,\n\tname=\"gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-11-20T01-00-50.945600](https://huggingface.co/datasets/open-llm-leaderboard/gmonsoon__SahabatAI-MediChatIndo-8B-v1-details/blob/main/gmonsoon__SahabatAI-MediChatIndo-8B-v1/results_2024-11-20T01-00-50.945600.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"acc_norm,none\": 0.41600726423660656,\n \"acc_norm_stderr,none\"\
: 0.005242772894833692,\n \"prompt_level_loose_acc,none\": 0.43068391866913125,\n\
\ \"prompt_level_loose_acc_stderr,none\": 0.021308808857898823,\n \
\ \"inst_level_loose_acc,none\": 0.5683453237410072,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"acc,none\": 0.3107546542553192,\n \"acc_stderr,none\"\
: 0.004219343456812655,\n \"inst_level_strict_acc,none\": 0.4832134292565948,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_strict_acc,none\"\
: 0.34935304990757854,\n \"prompt_level_strict_acc_stderr,none\": 0.02051672743144045,\n\
\ \"exact_match,none\": 0.061933534743202415,\n \"exact_match_stderr,none\"\
: 0.006506956861299618,\n \"alias\": \"leaderboard\"\n },\n \
\ \"leaderboard_bbh\": {\n \"acc_norm,none\": 0.4490539836833883,\n\
\ \"acc_norm_stderr,none\": 0.0060705431232413445,\n \"alias\"\
: \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \
\ \"acc_norm,none\": 0.756,\n \"acc_norm_stderr,none\": 0.02721799546455311\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\"\
: \" - leaderboard_bbh_causal_judgement\",\n \"acc_norm,none\": 0.6310160427807486,\n\
\ \"acc_norm_stderr,none\": 0.03538078548260318\n },\n \
\ \"leaderboard_bbh_date_understanding\": {\n \"alias\": \" - leaderboard_bbh_date_understanding\"\
,\n \"acc_norm,none\": 0.508,\n \"acc_norm_stderr,none\":\
\ 0.03168215643141386\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\",\n \
\ \"acc_norm,none\": 0.456,\n \"acc_norm_stderr,none\": 0.031563285061213475\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\"\
: \" - leaderboard_bbh_formal_fallacies\",\n \"acc_norm,none\": 0.532,\n\
\ \"acc_norm_stderr,none\": 0.031621252575725574\n },\n \
\ \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.2,\n \"acc_norm_stderr,none\": 0.02534897002097912\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \"\
\ - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\": 0.716,\n \
\ \"acc_norm_stderr,none\": 0.028576958730437443\n },\n \"\
leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\": \" \
\ - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.348,\n \"acc_norm_stderr,none\": 0.030186568464511673\n },\n\
\ \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\",\n \"\
acc_norm,none\": 0.272,\n \"acc_norm_stderr,none\": 0.028200088296309975\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n\
\ \"acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"acc_norm,none\"\
: 0.672,\n \"acc_norm_stderr,none\": 0.029752391824475363\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.556,\n \"acc_norm_stderr,none\":\
\ 0.03148684942554571\n },\n \"leaderboard_bbh_object_counting\":\
\ {\n \"alias\": \" - leaderboard_bbh_object_counting\",\n \
\ \"acc_norm,none\": 0.432,\n \"acc_norm_stderr,none\": 0.03139181076542942\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
alias\": \" - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\"\
: 0.4246575342465753,\n \"acc_norm_stderr,none\": 0.04104862657656195\n\
\ },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\",\n\
\ \"acc_norm,none\": 0.44,\n \"acc_norm_stderr,none\": 0.03145724452223569\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\": 0.552,\n \
\ \"acc_norm_stderr,none\": 0.03151438761115348\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\
,\n \"acc_norm,none\": 0.416,\n \"acc_norm_stderr,none\":\
\ 0.031235856237014505\n },\n \"leaderboard_bbh_snarks\": {\n \
\ \"alias\": \" - leaderboard_bbh_snarks\",\n \"acc_norm,none\"\
: 0.47752808988764045,\n \"acc_norm_stderr,none\": 0.03754432508487191\n\
\ },\n \"leaderboard_bbh_sports_understanding\": {\n \"\
alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.756,\n \"acc_norm_stderr,none\": 0.02721799546455311\n },\n\
\ \"leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" -\
\ leaderboard_bbh_temporal_sequences\",\n \"acc_norm,none\": 0.076,\n\
\ \"acc_norm_stderr,none\": 0.01679357306785969\n },\n \
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \"\
alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\",\n \
\ \"acc_norm,none\": 0.14,\n \"acc_norm_stderr,none\": 0.021989409645240245\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.136,\n \"acc_norm_stderr,none\":\
\ 0.021723342617052086\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.316,\n \"acc_norm_stderr,none\":\
\ 0.029462657598578648\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.492,\n \"acc_norm_stderr,none\": 0.03168215643141386\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2827181208053691,\n\
\ \"acc_norm_stderr,none\": 0.013056325474757132,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.26262626262626265,\n \"acc_norm_stderr,none\": 0.031353050095330834\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.28205128205128205,\n\
\ \"acc_norm_stderr,none\": 0.019275803929950375\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.2924107142857143,\n \"acc_norm_stderr,none\"\
: 0.02151461125992856\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.34935304990757854,\n \"prompt_level_strict_acc_stderr,none\": 0.02051672743144045,\n\
\ \"inst_level_strict_acc,none\": 0.4832134292565947,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.43068391866913125,\n \"prompt_level_loose_acc_stderr,none\": 0.021308808857898823,\n\
\ \"inst_level_loose_acc,none\": 0.5683453237410072,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.061933534743202415,\n \"exact_match_stderr,none\"\
: 0.006506956861299618,\n \"alias\": \" - leaderboard_math_hard\"\n \
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.11400651465798045,\n\
\ \"exact_match_stderr,none\": 0.01816851306262922\n },\n \
\ \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\": \" \
\ - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.016260162601626018,\n \"exact_match_stderr,none\": 0.011450452676925654\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\"\
: \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\": 0.030303030303030304,\n\
\ \"exact_match_stderr,none\": 0.014977019714308254\n },\n \
\ \"leaderboard_math_intermediate_algebra_hard\": {\n \"alias\":\
\ \" - leaderboard_math_intermediate_algebra_hard\",\n \"exact_match,none\"\
: 0.014285714285714285,\n \"exact_match_stderr,none\": 0.0071043508939153165\n\
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\"\
: \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\": 0.025974025974025976,\n\
\ \"exact_match_stderr,none\": 0.012859058999697068\n },\n \
\ \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.13471502590673576,\n \"exact_match_stderr,none\"\
: 0.02463978909770943\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.05185185185185185,\n \"exact_match_stderr,none\"\
: 0.019154368449050496\n },\n \"leaderboard_mmlu_pro\": {\n \
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.3107546542553192,\n\
\ \"acc_stderr,none\": 0.004219343456812655\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.37433862433862436,\n \"acc_norm_stderr,none\"\
: 0.0171566487360708,\n \"alias\": \" - leaderboard_musr\"\n },\n\
\ \"leaderboard_musr_murder_mysteries\": {\n \"alias\": \" -\
\ leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\": 0.516,\n\
\ \"acc_norm_stderr,none\": 0.03166998503010743\n },\n \
\ \"leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.2421875,\n \"acc_norm_stderr,none\"\
: 0.026827898476066977\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \
\ \"acc_norm,none\": 0.368,\n \"acc_norm_stderr,none\": 0.03056207062099311\n\
\ }\n },\n \"leaderboard\": {\n \"acc_norm,none\": 0.41600726423660656,\n\
\ \"acc_norm_stderr,none\": 0.005242772894833692,\n \"prompt_level_loose_acc,none\"\
: 0.43068391866913125,\n \"prompt_level_loose_acc_stderr,none\": 0.021308808857898823,\n\
\ \"inst_level_loose_acc,none\": 0.5683453237410072,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"acc,none\": 0.3107546542553192,\n \"acc_stderr,none\"\
: 0.004219343456812655,\n \"inst_level_strict_acc,none\": 0.4832134292565948,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_strict_acc,none\"\
: 0.34935304990757854,\n \"prompt_level_strict_acc_stderr,none\": 0.02051672743144045,\n\
\ \"exact_match,none\": 0.061933534743202415,\n \"exact_match_stderr,none\"\
: 0.006506956861299618,\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.4490539836833883,\n \"acc_norm_stderr,none\"\
: 0.0060705431232413445,\n \"alias\": \" - leaderboard_bbh\"\n },\n \
\ \"leaderboard_bbh_boolean_expressions\": {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\"\
,\n \"acc_norm,none\": 0.756,\n \"acc_norm_stderr,none\": 0.02721799546455311\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.6310160427807486,\n \"acc_norm_stderr,none\"\
: 0.03538078548260318\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.508,\n \"acc_norm_stderr,none\": 0.03168215643141386\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.456,\n \"acc_norm_stderr,none\": 0.031563285061213475\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.532,\n \"acc_norm_stderr,none\": 0.031621252575725574\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.2,\n \"acc_norm_stderr,none\": 0.02534897002097912\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.716,\n \"acc_norm_stderr,none\": 0.028576958730437443\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.348,\n \"acc_norm_stderr,none\": 0.030186568464511673\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.272,\n \"acc_norm_stderr,none\": 0.028200088296309975\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.672,\n \"acc_norm_stderr,none\": 0.029752391824475363\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.556,\n \"acc_norm_stderr,none\": 0.03148684942554571\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.432,\n \"acc_norm_stderr,none\": 0.03139181076542942\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.4246575342465753,\n\
\ \"acc_norm_stderr,none\": 0.04104862657656195\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.44,\n \"acc_norm_stderr,none\": 0.03145724452223569\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.552,\n \"acc_norm_stderr,none\": 0.03151438761115348\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.416,\n \"acc_norm_stderr,none\": 0.031235856237014505\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.47752808988764045,\n \"acc_norm_stderr,none\"\
: 0.03754432508487191\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.756,\n \"acc_norm_stderr,none\": 0.02721799546455311\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.076,\n \"acc_norm_stderr,none\": 0.01679357306785969\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.14,\n \"acc_norm_stderr,none\": 0.021989409645240245\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.136,\n \"acc_norm_stderr,none\": 0.021723342617052086\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.316,\n \"acc_norm_stderr,none\": 0.029462657598578648\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.492,\n \"acc_norm_stderr,none\": 0.03168215643141386\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2827181208053691,\n\
\ \"acc_norm_stderr,none\": 0.013056325474757132,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.26262626262626265,\n\
\ \"acc_norm_stderr,none\": 0.031353050095330834\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.28205128205128205,\n \"acc_norm_stderr,none\": 0.019275803929950375\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.2924107142857143,\n \"acc_norm_stderr,none\"\
: 0.02151461125992856\n },\n \"leaderboard_ifeval\": {\n \"alias\"\
: \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.34935304990757854,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.02051672743144045,\n \
\ \"inst_level_strict_acc,none\": 0.4832134292565947,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.43068391866913125,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.021308808857898823,\n \"inst_level_loose_acc,none\"\
: 0.5683453237410072,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n \
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.061933534743202415,\n\
\ \"exact_match_stderr,none\": 0.006506956861299618,\n \"alias\":\
\ \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\": {\n\
\ \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.11400651465798045,\n \"exact_match_stderr,none\": 0.01816851306262922\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.016260162601626018,\n \"exact_match_stderr,none\": 0.011450452676925654\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\": \" - leaderboard_math_geometry_hard\"\
,\n \"exact_match,none\": 0.030303030303030304,\n \"exact_match_stderr,none\"\
: 0.014977019714308254\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.014285714285714285,\n \"exact_match_stderr,none\"\
: 0.0071043508939153165\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.025974025974025976,\n \"exact_match_stderr,none\": 0.012859058999697068\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.13471502590673576,\n \"exact_match_stderr,none\"\
: 0.02463978909770943\n },\n \"leaderboard_math_precalculus_hard\": {\n \
\ \"alias\": \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\"\
: 0.05185185185185185,\n \"exact_match_stderr,none\": 0.019154368449050496\n\
\ },\n \"leaderboard_mmlu_pro\": {\n \"alias\": \" - leaderboard_mmlu_pro\"\
,\n \"acc,none\": 0.3107546542553192,\n \"acc_stderr,none\": 0.004219343456812655\n\
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.37433862433862436,\n\
\ \"acc_norm_stderr,none\": 0.0171566487360708,\n \"alias\": \" -\
\ leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\": {\n \
\ \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\"\
: 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743\n },\n \"\
leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.2421875,\n \"acc_norm_stderr,none\": 0.026827898476066977\n\
\ },\n \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.368,\n \"acc_norm_stderr,none\": 0.03056207062099311\n\
\ }\n}\n```"
repo_url: https://huggingface.co/gmonsoon/SahabatAI-MediChatIndo-8B-v1
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_bbh_navigate
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_bbh_snarks
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_gpqa_extended
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_gpqa_main
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_ifeval
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_ifeval_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_mmlu_pro
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_musr_object_placements
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T01-00-50.945600.jsonl'
- config_name: gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_20T01_00_50.945600
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T01-00-50.945600.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T01-00-50.945600.jsonl'
---
# Dataset Card for Evaluation run of gmonsoon/SahabatAI-MediChatIndo-8B-v1
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [gmonsoon/SahabatAI-MediChatIndo-8B-v1](https://huggingface.co/gmonsoon/SahabatAI-MediChatIndo-8B-v1)
The dataset is composed of 38 configuration(s), each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/gmonsoon__SahabatAI-MediChatIndo-8B-v1-details",
name="gmonsoon__SahabatAI-MediChatIndo-8B-v1__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
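If you are unsure which configuration name to pass, you can list them programmatically. The snippet below is a minimal sketch using the `datasets` helper `get_dataset_config_names`; the returned names match the `config_name` entries declared in the metadata above:
```python
from datasets import get_dataset_config_names

# List every available configuration (one per evaluated task).
configs = get_dataset_config_names(
    "open-llm-leaderboard/gmonsoon__SahabatAI-MediChatIndo-8B-v1-details"
)
print(configs)
```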
## Latest results
These are the [latest results from run 2024-11-20T01-00-50.945600](https://huggingface.co/datasets/open-llm-leaderboard/gmonsoon__SahabatAI-MediChatIndo-8B-v1-details/blob/main/gmonsoon__SahabatAI-MediChatIndo-8B-v1/results_2024-11-20T01-00-50.945600.json) (note that there might be results for other tasks in the repository if successive evaluations didn't cover the same tasks; you can find each one in the "results" configuration and in the "latest" split of each evaluation):
```python
{
"all": {
"leaderboard": {
"acc_norm,none": 0.41600726423660656,
"acc_norm_stderr,none": 0.005242772894833692,
"prompt_level_loose_acc,none": 0.43068391866913125,
"prompt_level_loose_acc_stderr,none": 0.021308808857898823,
"inst_level_loose_acc,none": 0.5683453237410072,
"inst_level_loose_acc_stderr,none": "N/A",
"acc,none": 0.3107546542553192,
"acc_stderr,none": 0.004219343456812655,
"inst_level_strict_acc,none": 0.4832134292565948,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.34935304990757854,
"prompt_level_strict_acc_stderr,none": 0.02051672743144045,
"exact_match,none": 0.061933534743202415,
"exact_match_stderr,none": 0.006506956861299618,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.4490539836833883,
"acc_norm_stderr,none": 0.0060705431232413445,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.756,
"acc_norm_stderr,none": 0.02721799546455311
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.6310160427807486,
"acc_norm_stderr,none": 0.03538078548260318
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.456,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.532,
"acc_norm_stderr,none": 0.031621252575725574
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.2,
"acc_norm_stderr,none": 0.02534897002097912
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.716,
"acc_norm_stderr,none": 0.028576958730437443
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.348,
"acc_norm_stderr,none": 0.030186568464511673
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.272,
"acc_norm_stderr,none": 0.028200088296309975
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.672,
"acc_norm_stderr,none": 0.029752391824475363
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.556,
"acc_norm_stderr,none": 0.03148684942554571
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.432,
"acc_norm_stderr,none": 0.03139181076542942
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.4246575342465753,
"acc_norm_stderr,none": 0.04104862657656195
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.44,
"acc_norm_stderr,none": 0.03145724452223569
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.03151438761115348
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.416,
"acc_norm_stderr,none": 0.031235856237014505
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.47752808988764045,
"acc_norm_stderr,none": 0.03754432508487191
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.756,
"acc_norm_stderr,none": 0.02721799546455311
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.076,
"acc_norm_stderr,none": 0.01679357306785969
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.14,
"acc_norm_stderr,none": 0.021989409645240245
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.136,
"acc_norm_stderr,none": 0.021723342617052086
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.029462657598578648
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.492,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2827181208053691,
"acc_norm_stderr,none": 0.013056325474757132,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.26262626262626265,
"acc_norm_stderr,none": 0.031353050095330834
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.28205128205128205,
"acc_norm_stderr,none": 0.019275803929950375
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.2924107142857143,
"acc_norm_stderr,none": 0.02151461125992856
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.34935304990757854,
"prompt_level_strict_acc_stderr,none": 0.02051672743144045,
"inst_level_strict_acc,none": 0.4832134292565947,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.43068391866913125,
"prompt_level_loose_acc_stderr,none": 0.021308808857898823,
"inst_level_loose_acc,none": 0.5683453237410072,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.061933534743202415,
"exact_match_stderr,none": 0.006506956861299618,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.11400651465798045,
"exact_match_stderr,none": 0.01816851306262922
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.016260162601626018,
"exact_match_stderr,none": 0.011450452676925654
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.030303030303030304,
"exact_match_stderr,none": 0.014977019714308254
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.014285714285714285,
"exact_match_stderr,none": 0.0071043508939153165
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.025974025974025976,
"exact_match_stderr,none": 0.012859058999697068
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.13471502590673576,
"exact_match_stderr,none": 0.02463978909770943
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.05185185185185185,
"exact_match_stderr,none": 0.019154368449050496
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.3107546542553192,
"acc_stderr,none": 0.004219343456812655
},
"leaderboard_musr": {
"acc_norm,none": 0.37433862433862436,
"acc_norm_stderr,none": 0.0171566487360708,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.2421875,
"acc_norm_stderr,none": 0.026827898476066977
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.368,
"acc_norm_stderr,none": 0.03056207062099311
}
},
"leaderboard": {
"acc_norm,none": 0.41600726423660656,
"acc_norm_stderr,none": 0.005242772894833692,
"prompt_level_loose_acc,none": 0.43068391866913125,
"prompt_level_loose_acc_stderr,none": 0.021308808857898823,
"inst_level_loose_acc,none": 0.5683453237410072,
"inst_level_loose_acc_stderr,none": "N/A",
"acc,none": 0.3107546542553192,
"acc_stderr,none": 0.004219343456812655,
"inst_level_strict_acc,none": 0.4832134292565948,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.34935304990757854,
"prompt_level_strict_acc_stderr,none": 0.02051672743144045,
"exact_match,none": 0.061933534743202415,
"exact_match_stderr,none": 0.006506956861299618,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.4490539836833883,
"acc_norm_stderr,none": 0.0060705431232413445,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.756,
"acc_norm_stderr,none": 0.02721799546455311
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.6310160427807486,
"acc_norm_stderr,none": 0.03538078548260318
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.456,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.532,
"acc_norm_stderr,none": 0.031621252575725574
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.2,
"acc_norm_stderr,none": 0.02534897002097912
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.716,
"acc_norm_stderr,none": 0.028576958730437443
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.348,
"acc_norm_stderr,none": 0.030186568464511673
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.272,
"acc_norm_stderr,none": 0.028200088296309975
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.672,
"acc_norm_stderr,none": 0.029752391824475363
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.556,
"acc_norm_stderr,none": 0.03148684942554571
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.432,
"acc_norm_stderr,none": 0.03139181076542942
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.4246575342465753,
"acc_norm_stderr,none": 0.04104862657656195
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.44,
"acc_norm_stderr,none": 0.03145724452223569
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.03151438761115348
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.416,
"acc_norm_stderr,none": 0.031235856237014505
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.47752808988764045,
"acc_norm_stderr,none": 0.03754432508487191
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.756,
"acc_norm_stderr,none": 0.02721799546455311
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.076,
"acc_norm_stderr,none": 0.01679357306785969
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.14,
"acc_norm_stderr,none": 0.021989409645240245
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.136,
"acc_norm_stderr,none": 0.021723342617052086
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.029462657598578648
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.492,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2827181208053691,
"acc_norm_stderr,none": 0.013056325474757132,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.26262626262626265,
"acc_norm_stderr,none": 0.031353050095330834
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.28205128205128205,
"acc_norm_stderr,none": 0.019275803929950375
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.2924107142857143,
"acc_norm_stderr,none": 0.02151461125992856
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.34935304990757854,
"prompt_level_strict_acc_stderr,none": 0.02051672743144045,
"inst_level_strict_acc,none": 0.4832134292565947,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.43068391866913125,
"prompt_level_loose_acc_stderr,none": 0.021308808857898823,
"inst_level_loose_acc,none": 0.5683453237410072,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.061933534743202415,
"exact_match_stderr,none": 0.006506956861299618,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.11400651465798045,
"exact_match_stderr,none": 0.01816851306262922
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.016260162601626018,
"exact_match_stderr,none": 0.011450452676925654
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.030303030303030304,
"exact_match_stderr,none": 0.014977019714308254
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.014285714285714285,
"exact_match_stderr,none": 0.0071043508939153165
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.025974025974025976,
"exact_match_stderr,none": 0.012859058999697068
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.13471502590673576,
"exact_match_stderr,none": 0.02463978909770943
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.05185185185185185,
"exact_match_stderr,none": 0.019154368449050496
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.3107546542553192,
"acc_stderr,none": 0.004219343456812655
},
"leaderboard_musr": {
"acc_norm,none": 0.37433862433862436,
"acc_norm_stderr,none": 0.0171566487360708,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.2421875,
"acc_norm_stderr,none": 0.026827898476066977
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.368,
"acc_norm_stderr,none": 0.03056207062099311
}
}
```
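To work with these numbers programmatically, you can download the raw results file linked above instead of copying values by hand. The sketch below assumes the JSON file exposes the same per-task blocks shown in the snippet (key names may differ slightly between leaderboard versions, so it checks a couple of likely locations):
```python
import json

from huggingface_hub import hf_hub_download

# Download the results file referenced above (repo path taken from that URL).
path = hf_hub_download(
    repo_id="open-llm-leaderboard/gmonsoon__SahabatAI-MediChatIndo-8B-v1-details",
    filename="gmonsoon__SahabatAI-MediChatIndo-8B-v1/results_2024-11-20T01-00-50.945600.json",
    repo_type="dataset",
)
with open(path) as f:
    results = json.load(f)

# The per-task scores may sit under "all" (as printed above) or under "results",
# so look in either place before reading a metric.
scores = results.get("all") or results.get("results") or results
print(scores["leaderboard_bbh"]["acc_norm,none"])
```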
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis-details | open-llm-leaderboard | "2024-11-20T01:04:15Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T01:01:02Z" | ---
pretty_name: Evaluation run of MEscriva/ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [MEscriva/ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis](https://huggingface.co/MEscriva/ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run. The \"train\" split always points to the latest\
\ results.\n\nAn additional configuration \"results\" stores all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis-details\"\
,\n\tname=\"MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-11-20T01-01-01.509925](https://huggingface.co/datasets/open-llm-leaderboard/MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis-details/blob/main/MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis/results_2024-11-20T01-01-01.509925.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"acc_norm,none\": 0.3048385004540148,\n \"acc_norm_stderr,none\"\
: 0.004988068712886883,\n \"inst_level_strict_acc,none\": 0.10671462829736211,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_strict_acc,none\"\
: 0.066543438077634,\n \"prompt_level_strict_acc_stderr,none\": 0.01072513566576045,\n\
\ \"exact_match,none\": 0.004531722054380665,\n \"exact_match_stderr,none\"\
: 0.0018429999361113496,\n \"inst_level_loose_acc,none\": 0.1211031175059952,\n\
\ \"inst_level_loose_acc_stderr,none\": \"N/A\",\n \"acc,none\"\
: 0.11544215425531915,\n \"acc_stderr,none\": 0.002913362107519347,\n\
\ \"prompt_level_loose_acc,none\": 0.08317929759704251,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.01188373476400857,\n \"alias\"\
: \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \"acc_norm,none\"\
: 0.30324596424231903,\n \"acc_norm_stderr,none\": 0.0057050677597805825,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \
\ \"acc_norm,none\": 0.576,\n \"acc_norm_stderr,none\": 0.03131803437491622\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\"\
: \" - leaderboard_bbh_causal_judgement\",\n \"acc_norm,none\": 0.47593582887700536,\n\
\ \"acc_norm_stderr,none\": 0.03661929361528698\n },\n \
\ \"leaderboard_bbh_date_understanding\": {\n \"alias\": \" - leaderboard_bbh_date_understanding\"\
,\n \"acc_norm,none\": 0.2,\n \"acc_norm_stderr,none\": 0.02534897002097912\n\
\ },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\": 0.336,\n\
\ \"acc_norm_stderr,none\": 0.02993325909419153\n },\n \
\ \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.544,\n \"acc_norm_stderr,none\":\
\ 0.031563285061213475\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\",\n \
\ \"acc_norm,none\": 0.084,\n \"acc_norm_stderr,none\": 0.017578738526776348\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \"\
\ - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\": 0.508,\n \
\ \"acc_norm_stderr,none\": 0.03168215643141386\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
,\n \"acc_norm,none\": 0.204,\n \"acc_norm_stderr,none\":\
\ 0.025537121574548162\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.14,\n \"acc_norm_stderr,none\": 0.021989409645240245\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n\
\ \"acc_norm,none\": 0.336,\n \"acc_norm_stderr,none\": 0.02993325909419153\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"acc_norm,none\"\
: 0.236,\n \"acc_norm_stderr,none\": 0.026909337594953852\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.42,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\"\
: \" - leaderboard_bbh_object_counting\",\n \"acc_norm,none\": 0.072,\n\
\ \"acc_norm_stderr,none\": 0.016381005750490122\n },\n \
\ \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\
,\n \"acc_norm,none\": 0.2328767123287671,\n \"acc_norm_stderr,none\"\
: 0.03510036341139227\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.168,\n \"acc_norm_stderr,none\":\
\ 0.023692813205492536\n },\n \"leaderboard_bbh_ruin_names\": {\n\
\ \"alias\": \" - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\"\
: 0.228,\n \"acc_norm_stderr,none\": 0.026587432487268498\n },\n\
\ \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.148,\n \"acc_norm_stderr,none\": 0.022503547243806186\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" -\
\ leaderboard_bbh_snarks\",\n \"acc_norm,none\": 0.4606741573033708,\n\
\ \"acc_norm_stderr,none\": 0.03746587736387869\n },\n \
\ \"leaderboard_bbh_sports_understanding\": {\n \"alias\": \" - leaderboard_bbh_sports_understanding\"\
,\n \"acc_norm,none\": 0.544,\n \"acc_norm_stderr,none\":\
\ 0.031563285061213475\n },\n \"leaderboard_bbh_temporal_sequences\"\
: {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\",\n \
\ \"acc_norm,none\": 0.26,\n \"acc_norm_stderr,none\": 0.027797315752644335\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.172,\n \"acc_norm_stderr,none\":\
\ 0.02391551394448624\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.168,\n \"acc_norm_stderr,none\":\
\ 0.023692813205492536\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.332,\n \"acc_norm_stderr,none\":\
\ 0.029844039047465857\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.492,\n \"acc_norm_stderr,none\": 0.03168215643141386\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2516778523489933,\n\
\ \"acc_norm_stderr,none\": 0.012579932036787245,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.22727272727272727,\n \"acc_norm_stderr,none\": 0.029857515673386438\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.26373626373626374,\n\
\ \"acc_norm_stderr,none\": 0.018875713580372433\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.24776785714285715,\n \"acc_norm_stderr,none\"\
: 0.020419479344751287\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.066543438077634,\n \"prompt_level_strict_acc_stderr,none\": 0.01072513566576045,\n\
\ \"inst_level_strict_acc,none\": 0.10671462829736211,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.08317929759704251,\n \"prompt_level_loose_acc_stderr,none\": 0.01188373476400857,\n\
\ \"inst_level_loose_acc,none\": 0.1211031175059952,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.004531722054380665,\n \"exact_match_stderr,none\"\
: 0.0018429999361113496,\n \"alias\": \" - leaderboard_math_hard\"\n\
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.003257328990228013,\n\
\ \"exact_match_stderr,none\": 0.003257328990228013\n },\n \
\ \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\": \"\
\ - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.008130081300813009,\n \"exact_match_stderr,none\": 0.008130081300813007\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\"\
: \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\": 0.0,\n\
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n\
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.01948051948051948,\n \"exact_match_stderr,none\": 0.011173331005571083\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_prealgebra_hard\",\n \"exact_match,none\": 0.0,\n\
\ \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.007407407407407408,\n \"exact_match_stderr,none\"\
: 0.007407407407407408\n },\n \"leaderboard_mmlu_pro\": {\n \
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.11544215425531915,\n\
\ \"acc_stderr,none\": 0.002913362107519347\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.4007936507936508,\n \"acc_norm_stderr,none\"\
: 0.01742542315741125,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"alias\": \"\
\ - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\": 0.544,\n\
\ \"acc_norm_stderr,none\": 0.031563285061213475\n },\n \
\ \"leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.28515625,\n \"acc_norm_stderr,none\"\
: 0.028273327213286358\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \
\ \"acc_norm,none\": 0.376,\n \"acc_norm_stderr,none\": 0.03069633626739458\n\
\ }\n },\n \"leaderboard\": {\n \"acc_norm,none\": 0.3048385004540148,\n\
\ \"acc_norm_stderr,none\": 0.004988068712886883,\n \"inst_level_strict_acc,none\"\
: 0.10671462829736211,\n \"inst_level_strict_acc_stderr,none\": \"N/A\",\n\
\ \"prompt_level_strict_acc,none\": 0.066543438077634,\n \"prompt_level_strict_acc_stderr,none\"\
: 0.01072513566576045,\n \"exact_match,none\": 0.004531722054380665,\n \
\ \"exact_match_stderr,none\": 0.0018429999361113496,\n \"inst_level_loose_acc,none\"\
: 0.1211031175059952,\n \"inst_level_loose_acc_stderr,none\": \"N/A\",\n\
\ \"acc,none\": 0.11544215425531915,\n \"acc_stderr,none\": 0.002913362107519347,\n\
\ \"prompt_level_loose_acc,none\": 0.08317929759704251,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.01188373476400857,\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.30324596424231903,\n \"acc_norm_stderr,none\"\
: 0.0057050677597805825,\n \"alias\": \" - leaderboard_bbh\"\n },\n \
\ \"leaderboard_bbh_boolean_expressions\": {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\"\
,\n \"acc_norm,none\": 0.576,\n \"acc_norm_stderr,none\": 0.03131803437491622\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.47593582887700536,\n \"acc_norm_stderr,none\"\
: 0.03661929361528698\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.2,\n \"acc_norm_stderr,none\": 0.02534897002097912\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\"\
: 0.336,\n \"acc_norm_stderr,none\": 0.02993325909419153\n },\n \"\
leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.544,\n \"acc_norm_stderr,none\": 0.031563285061213475\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.084,\n \"acc_norm_stderr,none\": 0.017578738526776348\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.508,\n \"acc_norm_stderr,none\": 0.03168215643141386\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.204,\n \"acc_norm_stderr,none\": 0.025537121574548162\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.14,\n \"acc_norm_stderr,none\": 0.021989409645240245\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.336,\n \"acc_norm_stderr,none\": 0.02993325909419153\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.236,\n \"acc_norm_stderr,none\": 0.026909337594953852\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.42,\n \"acc_norm_stderr,none\": 0.03127799950463661\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.072,\n \"acc_norm_stderr,none\": 0.016381005750490122\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.2328767123287671,\n\
\ \"acc_norm_stderr,none\": 0.03510036341139227\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.168,\n \"acc_norm_stderr,none\": 0.023692813205492536\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.228,\n \"acc_norm_stderr,none\": 0.026587432487268498\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.148,\n \"acc_norm_stderr,none\": 0.022503547243806186\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.4606741573033708,\n \"acc_norm_stderr,none\"\
: 0.03746587736387869\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.544,\n \"acc_norm_stderr,none\": 0.031563285061213475\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.26,\n \"acc_norm_stderr,none\": 0.027797315752644335\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.172,\n \"acc_norm_stderr,none\": 0.02391551394448624\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.168,\n \"acc_norm_stderr,none\": 0.023692813205492536\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.332,\n \"acc_norm_stderr,none\": 0.029844039047465857\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.492,\n \"acc_norm_stderr,none\": 0.03168215643141386\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.2516778523489933,\n\
\ \"acc_norm_stderr,none\": 0.012579932036787245,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.22727272727272727,\n\
\ \"acc_norm_stderr,none\": 0.029857515673386438\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.26373626373626374,\n \"acc_norm_stderr,none\": 0.018875713580372433\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.24776785714285715,\n \"acc_norm_stderr,none\"\
: 0.020419479344751287\n },\n \"leaderboard_ifeval\": {\n \"alias\"\
: \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.066543438077634,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.01072513566576045,\n \
\ \"inst_level_strict_acc,none\": 0.10671462829736211,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.08317929759704251,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.01188373476400857,\n \"inst_level_loose_acc,none\"\
: 0.1211031175059952,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n \
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.004531722054380665,\n\
\ \"exact_match_stderr,none\": 0.0018429999361113496,\n \"alias\"\
: \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\": {\n\
\ \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.003257328990228013,\n \"exact_match_stderr,none\": 0.003257328990228013\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.008130081300813009,\n \"exact_match_stderr,none\": 0.008130081300813007\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\": \" - leaderboard_math_geometry_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_intermediate_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_intermediate_algebra_hard\",\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0\n },\n \"leaderboard_math_num_theory_hard\"\
: {\n \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.01948051948051948,\n \"exact_match_stderr,none\": 0.011173331005571083\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0\n\
\ },\n \"leaderboard_math_precalculus_hard\": {\n \"alias\": \" -\
\ leaderboard_math_precalculus_hard\",\n \"exact_match,none\": 0.007407407407407408,\n\
\ \"exact_match_stderr,none\": 0.007407407407407408\n },\n \"leaderboard_mmlu_pro\"\
: {\n \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.11544215425531915,\n\
\ \"acc_stderr,none\": 0.002913362107519347\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.4007936507936508,\n \"acc_norm_stderr,none\"\
: 0.01742542315741125,\n \"alias\": \" - leaderboard_musr\"\n },\n \
\ \"leaderboard_musr_murder_mysteries\": {\n \"alias\": \" - leaderboard_musr_murder_mysteries\"\
,\n \"acc_norm,none\": 0.544,\n \"acc_norm_stderr,none\": 0.031563285061213475\n\
\ },\n \"leaderboard_musr_object_placements\": {\n \"alias\": \" -\
\ leaderboard_musr_object_placements\",\n \"acc_norm,none\": 0.28515625,\n\
\ \"acc_norm_stderr,none\": 0.028273327213286358\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \"acc_norm,none\"\
: 0.376,\n \"acc_norm_stderr,none\": 0.03069633626739458\n }\n}\n```"
repo_url: https://huggingface.co/MEscriva/ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_navigate
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_snarks
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_gpqa_extended
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_gpqa_main
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_ifeval
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_ifeval_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_mmlu_pro
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_musr_object_placements
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T01-01-01.509925.jsonl'
- config_name: MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_20T01_01_01.509925
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T01-01-01.509925.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T01-01-01.509925.jsonl'
---
# Dataset Card for Evaluation run of MEscriva/ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [MEscriva/ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis](https://huggingface.co/MEscriva/ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis)
The dataset is composed of 38 configuration(s), each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis-details",
name="MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
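Once loaded, the split behaves like any other 🤗 `datasets` split. Below is a minimal sketch (assuming the Hub is reachable and the split is non-empty) showing how to list the available configurations for this repo and inspect a single evaluated sample; the repo and configuration names are the ones from the example above.
```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis-details"

# List every configuration (one per evaluated task) available in the repo.
configs = get_dataset_config_names(repo)
print(f"{len(configs)} configurations, e.g. {configs[0]}")

# Load one task's details and look at the first evaluated sample.
data = load_dataset(
    repo,
    name="MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis__leaderboard_bbh_boolean_expressions",
    split="latest",
)
print(data.column_names)  # fields logged for each evaluated sample
print(data[0])            # the first evaluated example of this task
```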
## Latest results
These are the [latest results from run 2024-11-20T01-01-01.509925](https://huggingface.co/datasets/open-llm-leaderboard/MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis-details/blob/main/MEscriva__ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis/results_2024-11-20T01-01-01.509925.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; each of them can be found in the results file and in the "latest" split of the corresponding configuration):
```python
{
"all": {
"leaderboard": {
"acc_norm,none": 0.3048385004540148,
"acc_norm_stderr,none": 0.004988068712886883,
"inst_level_strict_acc,none": 0.10671462829736211,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.066543438077634,
"prompt_level_strict_acc_stderr,none": 0.01072513566576045,
"exact_match,none": 0.004531722054380665,
"exact_match_stderr,none": 0.0018429999361113496,
"inst_level_loose_acc,none": 0.1211031175059952,
"inst_level_loose_acc_stderr,none": "N/A",
"acc,none": 0.11544215425531915,
"acc_stderr,none": 0.002913362107519347,
"prompt_level_loose_acc,none": 0.08317929759704251,
"prompt_level_loose_acc_stderr,none": 0.01188373476400857,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.30324596424231903,
"acc_norm_stderr,none": 0.0057050677597805825,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.576,
"acc_norm_stderr,none": 0.03131803437491622
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.47593582887700536,
"acc_norm_stderr,none": 0.03661929361528698
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.2,
"acc_norm_stderr,none": 0.02534897002097912
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.336,
"acc_norm_stderr,none": 0.02993325909419153
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.084,
"acc_norm_stderr,none": 0.017578738526776348
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.204,
"acc_norm_stderr,none": 0.025537121574548162
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.14,
"acc_norm_stderr,none": 0.021989409645240245
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.336,
"acc_norm_stderr,none": 0.02993325909419153
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.236,
"acc_norm_stderr,none": 0.026909337594953852
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.072,
"acc_norm_stderr,none": 0.016381005750490122
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.2328767123287671,
"acc_norm_stderr,none": 0.03510036341139227
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.168,
"acc_norm_stderr,none": 0.023692813205492536
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.228,
"acc_norm_stderr,none": 0.026587432487268498
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.148,
"acc_norm_stderr,none": 0.022503547243806186
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.4606741573033708,
"acc_norm_stderr,none": 0.03746587736387869
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.26,
"acc_norm_stderr,none": 0.027797315752644335
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.172,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.168,
"acc_norm_stderr,none": 0.023692813205492536
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.332,
"acc_norm_stderr,none": 0.029844039047465857
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.492,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2516778523489933,
"acc_norm_stderr,none": 0.012579932036787245,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.22727272727272727,
"acc_norm_stderr,none": 0.029857515673386438
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.26373626373626374,
"acc_norm_stderr,none": 0.018875713580372433
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.24776785714285715,
"acc_norm_stderr,none": 0.020419479344751287
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.066543438077634,
"prompt_level_strict_acc_stderr,none": 0.01072513566576045,
"inst_level_strict_acc,none": 0.10671462829736211,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.08317929759704251,
"prompt_level_loose_acc_stderr,none": 0.01188373476400857,
"inst_level_loose_acc,none": 0.1211031175059952,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.004531722054380665,
"exact_match_stderr,none": 0.0018429999361113496,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.003257328990228013,
"exact_match_stderr,none": 0.003257328990228013
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.008130081300813009,
"exact_match_stderr,none": 0.008130081300813007
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.01948051948051948,
"exact_match_stderr,none": 0.011173331005571083
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.007407407407407408,
"exact_match_stderr,none": 0.007407407407407408
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.11544215425531915,
"acc_stderr,none": 0.002913362107519347
},
"leaderboard_musr": {
"acc_norm,none": 0.4007936507936508,
"acc_norm_stderr,none": 0.01742542315741125,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.28515625,
"acc_norm_stderr,none": 0.028273327213286358
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.376,
"acc_norm_stderr,none": 0.03069633626739458
}
},
"leaderboard": {
"acc_norm,none": 0.3048385004540148,
"acc_norm_stderr,none": 0.004988068712886883,
"inst_level_strict_acc,none": 0.10671462829736211,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.066543438077634,
"prompt_level_strict_acc_stderr,none": 0.01072513566576045,
"exact_match,none": 0.004531722054380665,
"exact_match_stderr,none": 0.0018429999361113496,
"inst_level_loose_acc,none": 0.1211031175059952,
"inst_level_loose_acc_stderr,none": "N/A",
"acc,none": 0.11544215425531915,
"acc_stderr,none": 0.002913362107519347,
"prompt_level_loose_acc,none": 0.08317929759704251,
"prompt_level_loose_acc_stderr,none": 0.01188373476400857,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.30324596424231903,
"acc_norm_stderr,none": 0.0057050677597805825,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.576,
"acc_norm_stderr,none": 0.03131803437491622
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.47593582887700536,
"acc_norm_stderr,none": 0.03661929361528698
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.2,
"acc_norm_stderr,none": 0.02534897002097912
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.336,
"acc_norm_stderr,none": 0.02993325909419153
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.084,
"acc_norm_stderr,none": 0.017578738526776348
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.204,
"acc_norm_stderr,none": 0.025537121574548162
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.14,
"acc_norm_stderr,none": 0.021989409645240245
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.336,
"acc_norm_stderr,none": 0.02993325909419153
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.236,
"acc_norm_stderr,none": 0.026909337594953852
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.072,
"acc_norm_stderr,none": 0.016381005750490122
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.2328767123287671,
"acc_norm_stderr,none": 0.03510036341139227
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.168,
"acc_norm_stderr,none": 0.023692813205492536
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.228,
"acc_norm_stderr,none": 0.026587432487268498
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.148,
"acc_norm_stderr,none": 0.022503547243806186
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.4606741573033708,
"acc_norm_stderr,none": 0.03746587736387869
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.26,
"acc_norm_stderr,none": 0.027797315752644335
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.172,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.168,
"acc_norm_stderr,none": 0.023692813205492536
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.332,
"acc_norm_stderr,none": 0.029844039047465857
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.492,
"acc_norm_stderr,none": 0.03168215643141386
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2516778523489933,
"acc_norm_stderr,none": 0.012579932036787245,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.22727272727272727,
"acc_norm_stderr,none": 0.029857515673386438
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.26373626373626374,
"acc_norm_stderr,none": 0.018875713580372433
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.24776785714285715,
"acc_norm_stderr,none": 0.020419479344751287
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.066543438077634,
"prompt_level_strict_acc_stderr,none": 0.01072513566576045,
"inst_level_strict_acc,none": 0.10671462829736211,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.08317929759704251,
"prompt_level_loose_acc_stderr,none": 0.01188373476400857,
"inst_level_loose_acc,none": 0.1211031175059952,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.004531722054380665,
"exact_match_stderr,none": 0.0018429999361113496,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.003257328990228013,
"exact_match_stderr,none": 0.003257328990228013
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.008130081300813009,
"exact_match_stderr,none": 0.008130081300813007
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.01948051948051948,
"exact_match_stderr,none": 0.011173331005571083
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.007407407407407408,
"exact_match_stderr,none": 0.007407407407407408
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.11544215425531915,
"acc_stderr,none": 0.002913362107519347
},
"leaderboard_musr": {
"acc_norm,none": 0.4007936507936508,
"acc_norm_stderr,none": 0.01742542315741125,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.28515625,
"acc_norm_stderr,none": 0.028273327213286358
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.376,
"acc_norm_stderr,none": 0.03069633626739458
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases, and limitations of the dataset. More information is needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/suayptalha__Komodo-Llama-3.2-3B-v2-fp16-details | open-llm-leaderboard | "2024-11-20T01:07:06Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T01:03:58Z" | ---
pretty_name: Evaluation run of suayptalha/Komodo-Llama-3.2-3B-v2-fp16
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [suayptalha/Komodo-Llama-3.2-3B-v2-fp16](https://huggingface.co/suayptalha/Komodo-Llama-3.2-3B-v2-fp16)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/suayptalha__Komodo-Llama-3.2-3B-v2-fp16-details\"\
,\n\tname=\"suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-11-20T01-03-58.085488](https://huggingface.co/datasets/open-llm-leaderboard/suayptalha__Komodo-Llama-3.2-3B-v2-fp16-details/blob/main/suayptalha__Komodo-Llama-3.2-3B-v2-fp16/results_2024-11-20T01-03-58.085488.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"inst_level_loose_acc,none\": 0.7170263788968825,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\",\n \"exact_match,none\":\
\ 0.06268882175226587,\n \"exact_match_stderr,none\": 0.006457466610383278,\n\
\ \"acc_norm,none\": 0.4005707614476586,\n \"acc_norm_stderr,none\"\
: 0.005268101584129467,\n \"acc,none\": 0.28523936170212766,\n \
\ \"acc_stderr,none\": 0.004116557429462133,\n \"prompt_level_loose_acc,none\"\
: 0.6118299445471349,\n \"prompt_level_loose_acc_stderr,none\": 0.020971500215794834,\n\
\ \"inst_level_strict_acc,none\": 0.6858513189448441,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_strict_acc,none\"\
: 0.5822550831792976,\n \"prompt_level_strict_acc_stderr,none\": 0.02122341916161409,\n\
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.4339524388127061,\n \"acc_norm_stderr,none\"\
: 0.006138262417188087,\n \"alias\": \" - leaderboard_bbh\"\n \
\ },\n \"leaderboard_bbh_boolean_expressions\": {\n \"alias\"\
: \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\": 0.82,\n\
\ \"acc_norm_stderr,none\": 0.02434689065029351\n },\n \
\ \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.6149732620320856,\n \"acc_norm_stderr,none\"\
: 0.03567936280544673\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_date_understanding\",\n \
\ \"acc_norm,none\": 0.496,\n \"acc_norm_stderr,none\": 0.0316851985511992\n\
\ },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\": 0.364,\n\
\ \"acc_norm_stderr,none\": 0.030491555220405475\n },\n \
\ \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.468,\n \"acc_norm_stderr,none\":\
\ 0.03162125257572558\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\",\n \
\ \"acc_norm,none\": 0.368,\n \"acc_norm_stderr,none\": 0.03056207062099311\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \"\
\ - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\": 0.556,\n \
\ \"acc_norm_stderr,none\": 0.03148684942554571\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
,\n \"acc_norm,none\": 0.324,\n \"acc_norm_stderr,none\":\
\ 0.029658294924545567\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.2,\n \"acc_norm_stderr,none\": 0.02534897002097912\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n\
\ \"acc_norm,none\": 0.468,\n \"acc_norm_stderr,none\": 0.03162125257572558\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"acc_norm,none\"\
: 0.616,\n \"acc_norm_stderr,none\": 0.030821679117375447\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.584,\n \"acc_norm_stderr,none\":\
\ 0.031235856237014505\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"alias\": \" - leaderboard_bbh_object_counting\",\n \
\ \"acc_norm,none\": 0.404,\n \"acc_norm_stderr,none\": 0.03109668818482536\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
alias\": \" - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\"\
: 0.3561643835616438,\n \"acc_norm_stderr,none\": 0.03976754138601307\n\
\ },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\",\n\
\ \"acc_norm,none\": 0.368,\n \"acc_norm_stderr,none\": 0.03056207062099311\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\": 0.628,\n \
\ \"acc_norm_stderr,none\": 0.03063032594455827\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\
,\n \"acc_norm,none\": 0.24,\n \"acc_norm_stderr,none\": 0.027065293652238982\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" -\
\ leaderboard_bbh_snarks\",\n \"acc_norm,none\": 0.5168539325842697,\n\
\ \"acc_norm_stderr,none\": 0.037560944447344834\n },\n \
\ \"leaderboard_bbh_sports_understanding\": {\n \"alias\": \" - leaderboard_bbh_sports_understanding\"\
,\n \"acc_norm,none\": 0.66,\n \"acc_norm_stderr,none\": 0.030020073605457876\n\
\ },\n \"leaderboard_bbh_temporal_sequences\": {\n \"alias\"\
: \" - leaderboard_bbh_temporal_sequences\",\n \"acc_norm,none\": 0.316,\n\
\ \"acc_norm_stderr,none\": 0.029462657598578648\n },\n \
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \"\
alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\",\n \
\ \"acc_norm,none\": 0.132,\n \"acc_norm_stderr,none\": 0.021450980824038166\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.124,\n \"acc_norm_stderr,none\":\
\ 0.020886382258673272\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.34,\n \"acc_norm_stderr,none\": 0.030020073605457873\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\":\
\ \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\": 0.488,\n\
\ \"acc_norm_stderr,none\": 0.03167708558254714\n },\n \
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.27768456375838924,\n\
\ \"acc_norm_stderr,none\": 0.012987130750836516,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.2727272727272727,\n \"acc_norm_stderr,none\": 0.03173071239071728\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.2838827838827839,\n\
\ \"acc_norm_stderr,none\": 0.01931360450766325\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.27232142857142855,\n \"acc_norm_stderr,none\"\
: 0.02105508212932411\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.5822550831792976,\n \"prompt_level_strict_acc_stderr,none\": 0.02122341916161409,\n\
\ \"inst_level_strict_acc,none\": 0.6858513189448441,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.6118299445471349,\n \"prompt_level_loose_acc_stderr,none\": 0.020971500215794834,\n\
\ \"inst_level_loose_acc,none\": 0.7170263788968825,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.06268882175226587,\n \"exact_match_stderr,none\"\
: 0.006457466610383278,\n \"alias\": \" - leaderboard_math_hard\"\n \
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.15960912052117263,\n\
\ \"exact_match_stderr,none\": 0.020936727679064102\n },\n \
\ \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\": \"\
\ - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.016260162601626018,\n \"exact_match_stderr,none\": 0.011450452676925654\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\"\
: \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\": 0.007575757575757576,\n\
\ \"exact_match_stderr,none\": 0.007575757575757577\n },\n \
\ \"leaderboard_math_intermediate_algebra_hard\": {\n \"alias\":\
\ \" - leaderboard_math_intermediate_algebra_hard\",\n \"exact_match,none\"\
: 0.0035714285714285713,\n \"exact_match_stderr,none\": 0.0035714285714285713\n\
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\"\
: \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\": 0.03896103896103896,\n\
\ \"exact_match_stderr,none\": 0.015643720451650286\n },\n \
\ \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.10362694300518134,\n \"exact_match_stderr,none\"\
: 0.02199531196364426\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.02962962962962963,\n \"exact_match_stderr,none\"\
: 0.014648038602753809\n },\n \"leaderboard_mmlu_pro\": {\n \
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.28523936170212766,\n\
\ \"acc_stderr,none\": 0.004116557429462133\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.33994708994708994,\n \"acc_norm_stderr,none\"\
: 0.016688935340832455,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"alias\":\
\ \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\": 0.512,\n\
\ \"acc_norm_stderr,none\": 0.03167708558254714\n },\n \
\ \"leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.26171875,\n \"acc_norm_stderr,none\"\
: 0.027526959754524398\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \
\ \"acc_norm,none\": 0.248,\n \"acc_norm_stderr,none\": 0.027367497504863593\n\
\ }\n },\n \"leaderboard\": {\n \"inst_level_loose_acc,none\"\
: 0.7170263788968825,\n \"inst_level_loose_acc_stderr,none\": \"N/A\",\n\
\ \"exact_match,none\": 0.06268882175226587,\n \"exact_match_stderr,none\"\
: 0.006457466610383278,\n \"acc_norm,none\": 0.4005707614476586,\n \
\ \"acc_norm_stderr,none\": 0.005268101584129467,\n \"acc,none\": 0.28523936170212766,\n\
\ \"acc_stderr,none\": 0.004116557429462133,\n \"prompt_level_loose_acc,none\"\
: 0.6118299445471349,\n \"prompt_level_loose_acc_stderr,none\": 0.020971500215794834,\n\
\ \"inst_level_strict_acc,none\": 0.6858513189448441,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_strict_acc,none\": 0.5822550831792976,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.02122341916161409,\n \"alias\"\
: \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \"acc_norm,none\"\
: 0.4339524388127061,\n \"acc_norm_stderr,none\": 0.006138262417188087,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \"\
acc_norm,none\": 0.82,\n \"acc_norm_stderr,none\": 0.02434689065029351\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.6149732620320856,\n \"acc_norm_stderr,none\"\
: 0.03567936280544673\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.496,\n \"acc_norm_stderr,none\": 0.0316851985511992\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\"\
: 0.364,\n \"acc_norm_stderr,none\": 0.030491555220405475\n },\n \"\
leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.468,\n \"acc_norm_stderr,none\": 0.03162125257572558\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.368,\n \"acc_norm_stderr,none\": 0.03056207062099311\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.556,\n \"acc_norm_stderr,none\": 0.03148684942554571\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.324,\n \"acc_norm_stderr,none\": 0.029658294924545567\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.2,\n \"acc_norm_stderr,none\": 0.02534897002097912\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.468,\n \"acc_norm_stderr,none\": 0.03162125257572558\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.616,\n \"acc_norm_stderr,none\": 0.030821679117375447\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.584,\n \"acc_norm_stderr,none\": 0.031235856237014505\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.404,\n \"acc_norm_stderr,none\": 0.03109668818482536\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.3561643835616438,\n\
\ \"acc_norm_stderr,none\": 0.03976754138601307\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.368,\n \"acc_norm_stderr,none\": 0.03056207062099311\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.628,\n \"acc_norm_stderr,none\": 0.03063032594455827\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.24,\n \"acc_norm_stderr,none\": 0.027065293652238982\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.5168539325842697,\n \"acc_norm_stderr,none\"\
: 0.037560944447344834\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.66,\n \"acc_norm_stderr,none\": 0.030020073605457876\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.316,\n \"acc_norm_stderr,none\": 0.029462657598578648\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.132,\n \"acc_norm_stderr,none\": 0.021450980824038166\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.124,\n \"acc_norm_stderr,none\": 0.020886382258673272\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.34,\n \"acc_norm_stderr,none\": 0.030020073605457873\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.27768456375838924,\n\
\ \"acc_norm_stderr,none\": 0.012987130750836516,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.2727272727272727,\n\
\ \"acc_norm_stderr,none\": 0.03173071239071728\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.2838827838827839,\n \"acc_norm_stderr,none\": 0.01931360450766325\n \
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.27232142857142855,\n \"acc_norm_stderr,none\"\
: 0.02105508212932411\n },\n \"leaderboard_ifeval\": {\n \"alias\"\
: \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.5822550831792976,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.02122341916161409,\n \
\ \"inst_level_strict_acc,none\": 0.6858513189448441,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.6118299445471349,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.020971500215794834,\n \"inst_level_loose_acc,none\"\
: 0.7170263788968825,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n \
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.06268882175226587,\n\
\ \"exact_match_stderr,none\": 0.006457466610383278,\n \"alias\":\
\ \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\": {\n\
\ \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.15960912052117263,\n \"exact_match_stderr,none\": 0.020936727679064102\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.016260162601626018,\n \"exact_match_stderr,none\": 0.011450452676925654\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\": \" - leaderboard_math_geometry_hard\"\
,\n \"exact_match,none\": 0.007575757575757576,\n \"exact_match_stderr,none\"\
: 0.007575757575757577\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.0035714285714285713,\n \"exact_match_stderr,none\"\
: 0.0035714285714285713\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.03896103896103896,\n \"exact_match_stderr,none\": 0.015643720451650286\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.10362694300518134,\n \"exact_match_stderr,none\"\
: 0.02199531196364426\n },\n \"leaderboard_math_precalculus_hard\": {\n \
\ \"alias\": \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\"\
: 0.02962962962962963,\n \"exact_match_stderr,none\": 0.014648038602753809\n\
\ },\n \"leaderboard_mmlu_pro\": {\n \"alias\": \" - leaderboard_mmlu_pro\"\
,\n \"acc,none\": 0.28523936170212766,\n \"acc_stderr,none\": 0.004116557429462133\n\
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.33994708994708994,\n\
\ \"acc_norm_stderr,none\": 0.016688935340832455,\n \"alias\": \"\
\ - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\": {\n \
\ \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\"\
: 0.512,\n \"acc_norm_stderr,none\": 0.03167708558254714\n },\n \"\
leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.26171875,\n \"acc_norm_stderr,none\": 0.027526959754524398\n\
\ },\n \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.248,\n \"acc_norm_stderr,none\": 0.027367497504863593\n\
\ }\n}\n```"
repo_url: https://huggingface.co/suayptalha/Komodo-Llama-3.2-3B-v2-fp16
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_bbh_navigate
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_bbh_snarks
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_gpqa_extended
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_gpqa_main
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_ifeval
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_ifeval_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_mmlu_pro
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_musr_object_placements
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T01-03-58.085488.jsonl'
- config_name: suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_20T01_03_58.085488
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T01-03-58.085488.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T01-03-58.085488.jsonl'
---
# Dataset Card for Evaluation run of suayptalha/Komodo-Llama-3.2-3B-v2-fp16
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [suayptalha/Komodo-Llama-3.2-3B-v2-fp16](https://huggingface.co/suayptalha/Komodo-Llama-3.2-3B-v2-fp16)
The dataset is composed of 38 configuration(s), each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/suayptalha__Komodo-Llama-3.2-3B-v2-fp16-details",
name="suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
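The other task configurations listed in the YAML header of this card can be loaded the same way. A minimal sketch for discovering them programmatically (the `get_dataset_config_names` helper comes from the `datasets` library; the IFEval configuration name below is taken from this card's config list):
```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/suayptalha__Komodo-Llama-3.2-3B-v2-fp16-details"

# List every available configuration (one per evaluated task, plus the
# aggregated "results" configuration mentioned above).
configs = get_dataset_config_names(repo)
print(configs)

# Load another task, e.g. IFEval, at its latest run.
ifeval = load_dataset(
    repo,
    name="suayptalha__Komodo-Llama-3.2-3B-v2-fp16__leaderboard_ifeval",
    split="latest",
)
print(ifeval)
```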
## Latest results
These are the [latest results from run 2024-11-20T01-03-58.085488](https://huggingface.co/datasets/open-llm-leaderboard/suayptalha__Komodo-Llama-3.2-3B-v2-fp16-details/blob/main/suayptalha__Komodo-Llama-3.2-3B-v2-fp16/results_2024-11-20T01-03-58.085488.json) (note that there might be results for other tasks in the repository if successive evaluations didn't cover the same tasks; you can find each one in the per-task results and in the "latest" split of each evaluation):
```python
{
"all": {
"leaderboard": {
"inst_level_loose_acc,none": 0.7170263788968825,
"inst_level_loose_acc_stderr,none": "N/A",
"exact_match,none": 0.06268882175226587,
"exact_match_stderr,none": 0.006457466610383278,
"acc_norm,none": 0.4005707614476586,
"acc_norm_stderr,none": 0.005268101584129467,
"acc,none": 0.28523936170212766,
"acc_stderr,none": 0.004116557429462133,
"prompt_level_loose_acc,none": 0.6118299445471349,
"prompt_level_loose_acc_stderr,none": 0.020971500215794834,
"inst_level_strict_acc,none": 0.6858513189448441,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.5822550831792976,
"prompt_level_strict_acc_stderr,none": 0.02122341916161409,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.4339524388127061,
"acc_norm_stderr,none": 0.006138262417188087,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.82,
"acc_norm_stderr,none": 0.02434689065029351
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.6149732620320856,
"acc_norm_stderr,none": 0.03567936280544673
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.496,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.364,
"acc_norm_stderr,none": 0.030491555220405475
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.468,
"acc_norm_stderr,none": 0.03162125257572558
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.368,
"acc_norm_stderr,none": 0.03056207062099311
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.556,
"acc_norm_stderr,none": 0.03148684942554571
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.324,
"acc_norm_stderr,none": 0.029658294924545567
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.2,
"acc_norm_stderr,none": 0.02534897002097912
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.468,
"acc_norm_stderr,none": 0.03162125257572558
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.616,
"acc_norm_stderr,none": 0.030821679117375447
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.584,
"acc_norm_stderr,none": 0.031235856237014505
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.404,
"acc_norm_stderr,none": 0.03109668818482536
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.3561643835616438,
"acc_norm_stderr,none": 0.03976754138601307
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.368,
"acc_norm_stderr,none": 0.03056207062099311
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.628,
"acc_norm_stderr,none": 0.03063032594455827
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.24,
"acc_norm_stderr,none": 0.027065293652238982
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.5168539325842697,
"acc_norm_stderr,none": 0.037560944447344834
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.66,
"acc_norm_stderr,none": 0.030020073605457876
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.029462657598578648
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.132,
"acc_norm_stderr,none": 0.021450980824038166
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.124,
"acc_norm_stderr,none": 0.020886382258673272
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.34,
"acc_norm_stderr,none": 0.030020073605457873
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.27768456375838924,
"acc_norm_stderr,none": 0.012987130750836516,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2727272727272727,
"acc_norm_stderr,none": 0.03173071239071728
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.2838827838827839,
"acc_norm_stderr,none": 0.01931360450766325
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.27232142857142855,
"acc_norm_stderr,none": 0.02105508212932411
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.5822550831792976,
"prompt_level_strict_acc_stderr,none": 0.02122341916161409,
"inst_level_strict_acc,none": 0.6858513189448441,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.6118299445471349,
"prompt_level_loose_acc_stderr,none": 0.020971500215794834,
"inst_level_loose_acc,none": 0.7170263788968825,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.06268882175226587,
"exact_match_stderr,none": 0.006457466610383278,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.15960912052117263,
"exact_match_stderr,none": 0.020936727679064102
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.016260162601626018,
"exact_match_stderr,none": 0.011450452676925654
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.007575757575757576,
"exact_match_stderr,none": 0.007575757575757577
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0035714285714285713,
"exact_match_stderr,none": 0.0035714285714285713
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.03896103896103896,
"exact_match_stderr,none": 0.015643720451650286
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.10362694300518134,
"exact_match_stderr,none": 0.02199531196364426
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.02962962962962963,
"exact_match_stderr,none": 0.014648038602753809
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.28523936170212766,
"acc_stderr,none": 0.004116557429462133
},
"leaderboard_musr": {
"acc_norm,none": 0.33994708994708994,
"acc_norm_stderr,none": 0.016688935340832455,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.512,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.26171875,
"acc_norm_stderr,none": 0.027526959754524398
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.248,
"acc_norm_stderr,none": 0.027367497504863593
}
},
"leaderboard": {
"inst_level_loose_acc,none": 0.7170263788968825,
"inst_level_loose_acc_stderr,none": "N/A",
"exact_match,none": 0.06268882175226587,
"exact_match_stderr,none": 0.006457466610383278,
"acc_norm,none": 0.4005707614476586,
"acc_norm_stderr,none": 0.005268101584129467,
"acc,none": 0.28523936170212766,
"acc_stderr,none": 0.004116557429462133,
"prompt_level_loose_acc,none": 0.6118299445471349,
"prompt_level_loose_acc_stderr,none": 0.020971500215794834,
"inst_level_strict_acc,none": 0.6858513189448441,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.5822550831792976,
"prompt_level_strict_acc_stderr,none": 0.02122341916161409,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.4339524388127061,
"acc_norm_stderr,none": 0.006138262417188087,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.82,
"acc_norm_stderr,none": 0.02434689065029351
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.6149732620320856,
"acc_norm_stderr,none": 0.03567936280544673
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.496,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.364,
"acc_norm_stderr,none": 0.030491555220405475
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.468,
"acc_norm_stderr,none": 0.03162125257572558
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.368,
"acc_norm_stderr,none": 0.03056207062099311
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.556,
"acc_norm_stderr,none": 0.03148684942554571
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.324,
"acc_norm_stderr,none": 0.029658294924545567
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.2,
"acc_norm_stderr,none": 0.02534897002097912
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.468,
"acc_norm_stderr,none": 0.03162125257572558
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.616,
"acc_norm_stderr,none": 0.030821679117375447
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.584,
"acc_norm_stderr,none": 0.031235856237014505
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.404,
"acc_norm_stderr,none": 0.03109668818482536
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.3561643835616438,
"acc_norm_stderr,none": 0.03976754138601307
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.368,
"acc_norm_stderr,none": 0.03056207062099311
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.628,
"acc_norm_stderr,none": 0.03063032594455827
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.24,
"acc_norm_stderr,none": 0.027065293652238982
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.5168539325842697,
"acc_norm_stderr,none": 0.037560944447344834
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.66,
"acc_norm_stderr,none": 0.030020073605457876
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.029462657598578648
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.132,
"acc_norm_stderr,none": 0.021450980824038166
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.124,
"acc_norm_stderr,none": 0.020886382258673272
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.34,
"acc_norm_stderr,none": 0.030020073605457873
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.27768456375838924,
"acc_norm_stderr,none": 0.012987130750836516,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2727272727272727,
"acc_norm_stderr,none": 0.03173071239071728
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.2838827838827839,
"acc_norm_stderr,none": 0.01931360450766325
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.27232142857142855,
"acc_norm_stderr,none": 0.02105508212932411
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.5822550831792976,
"prompt_level_strict_acc_stderr,none": 0.02122341916161409,
"inst_level_strict_acc,none": 0.6858513189448441,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.6118299445471349,
"prompt_level_loose_acc_stderr,none": 0.020971500215794834,
"inst_level_loose_acc,none": 0.7170263788968825,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.06268882175226587,
"exact_match_stderr,none": 0.006457466610383278,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.15960912052117263,
"exact_match_stderr,none": 0.020936727679064102
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.016260162601626018,
"exact_match_stderr,none": 0.011450452676925654
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.007575757575757576,
"exact_match_stderr,none": 0.007575757575757577
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.0035714285714285713,
"exact_match_stderr,none": 0.0035714285714285713
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.03896103896103896,
"exact_match_stderr,none": 0.015643720451650286
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.10362694300518134,
"exact_match_stderr,none": 0.02199531196364426
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.02962962962962963,
"exact_match_stderr,none": 0.014648038602753809
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.28523936170212766,
"acc_stderr,none": 0.004116557429462133
},
"leaderboard_musr": {
"acc_norm,none": 0.33994708994708994,
"acc_norm_stderr,none": 0.016688935340832455,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.512,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.26171875,
"acc_norm_stderr,none": 0.027526959754524398
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.248,
"acc_norm_stderr,none": 0.027367497504863593
}
}
```
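The same aggregate numbers can also be read directly from the raw results file linked above. A minimal sketch, assuming the file follows the structure of the excerpt shown here (the `hf_hub_download` call is the standard `huggingface_hub` API; the key names are taken from the JSON above):
```python
import json

from huggingface_hub import hf_hub_download

# Download the results file for this run and read a few aggregate metrics.
path = hf_hub_download(
    repo_id="open-llm-leaderboard/suayptalha__Komodo-Llama-3.2-3B-v2-fp16-details",
    filename="suayptalha__Komodo-Llama-3.2-3B-v2-fp16/results_2024-11-20T01-03-58.085488.json",
    repo_type="dataset",
)

with open(path) as f:
    results = json.load(f)

leaderboard = results["all"]["leaderboard"]
print("BBH acc_norm:", results["all"]["leaderboard_bbh"]["acc_norm,none"])
print("IFEval strict (prompt-level):", leaderboard["prompt_level_strict_acc,none"])
```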
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/theprint__WorldBuilder-12B-details | open-llm-leaderboard | "2024-11-20T01:27:44Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T01:24:41Z" | ---
pretty_name: Evaluation run of theprint/WorldBuilder-12B
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [theprint/WorldBuilder-12B](https://huggingface.co/theprint/WorldBuilder-12B)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/theprint__WorldBuilder-12B-details\"\
,\n\tname=\"theprint__WorldBuilder-12B__leaderboard_bbh_boolean_expressions\",\n\
\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-11-20T01-24-40.337241](https://huggingface.co/datasets/open-llm-leaderboard/theprint__WorldBuilder-12B-details/blob/main/theprint__WorldBuilder-12B/results_2024-11-20T01-24-40.337241.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"exact_match,none\": 0.03625377643504532,\n \"exact_match_stderr,none\"\
: 0.005106903112070287,\n \"acc_norm,none\": 0.45868465430016864,\n \
\ \"acc_norm_stderr,none\": 0.0053010760025434655,\n \"prompt_level_loose_acc,none\"\
: 0.12199630314232902,\n \"prompt_level_loose_acc_stderr,none\": 0.014083954960767047,\n\
\ \"inst_level_strict_acc,none\": 0.1750599520383693,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"inst_level_loose_acc,none\"\
: 0.2038369304556355,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\
,\n \"prompt_level_strict_acc,none\": 0.09981515711645102,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.012899331846482735,\n \
\ \"acc,none\": 0.31923204787234044,\n \"acc_stderr,none\": 0.004250127218302292,\n\
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.49904530463461205,\n \"acc_norm_stderr,none\"\
: 0.006124957989583315,\n \"alias\": \" - leaderboard_bbh\"\n \
\ },\n \"leaderboard_bbh_boolean_expressions\": {\n \"alias\"\
: \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\": 0.856,\n\
\ \"acc_norm_stderr,none\": 0.022249407735450245\n },\n \
\ \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.6149732620320856,\n \"acc_norm_stderr,none\"\
: 0.03567936280544673\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_date_understanding\",\n \
\ \"acc_norm,none\": 0.636,\n \"acc_norm_stderr,none\": 0.030491555220405475\n\
\ },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\": 0.656,\n\
\ \"acc_norm_stderr,none\": 0.03010450339231644\n },\n \
\ \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.496,\n \"acc_norm_stderr,none\":\
\ 0.0316851985511992\n },\n \"leaderboard_bbh_geometric_shapes\":\
\ {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\",\n \
\ \"acc_norm,none\": 0.456,\n \"acc_norm_stderr,none\": 0.031563285061213475\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \"\
\ - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\": 0.488,\n \
\ \"acc_norm_stderr,none\": 0.03167708558254714\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
,\n \"acc_norm,none\": 0.404,\n \"acc_norm_stderr,none\":\
\ 0.03109668818482536\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.364,\n \"acc_norm_stderr,none\":\
\ 0.030491555220405475\n },\n \"leaderboard_bbh_logical_deduction_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\
,\n \"acc_norm,none\": 0.692,\n \"acc_norm_stderr,none\":\
\ 0.02925692860650181\n },\n \"leaderboard_bbh_movie_recommendation\"\
: {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\",\n \
\ \"acc_norm,none\": 0.784,\n \"acc_norm_stderr,none\": 0.02607865766373279\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \"\
\ - leaderboard_bbh_navigate\",\n \"acc_norm,none\": 0.596,\n \
\ \"acc_norm_stderr,none\": 0.03109668818482536\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"alias\": \" - leaderboard_bbh_object_counting\",\n \
\ \"acc_norm,none\": 0.38,\n \"acc_norm_stderr,none\": 0.030760116042626098\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
alias\": \" - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\"\
: 0.4520547945205479,\n \"acc_norm_stderr,none\": 0.04133139887430771\n\
\ },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\",\n\
\ \"acc_norm,none\": 0.432,\n \"acc_norm_stderr,none\": 0.03139181076542942\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\": 0.456,\n \
\ \"acc_norm_stderr,none\": 0.031563285061213475\n },\n \"\
leaderboard_bbh_salient_translation_error_detection\": {\n \"alias\"\
: \" - leaderboard_bbh_salient_translation_error_detection\",\n \"acc_norm,none\"\
: 0.34,\n \"acc_norm_stderr,none\": 0.030020073605457873\n },\n\
\ \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.6292134831460674,\n \"acc_norm_stderr,none\"\
: 0.03630566062020178\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \
\ \"acc_norm,none\": 0.828,\n \"acc_norm_stderr,none\": 0.02391551394448624\n\
\ },\n \"leaderboard_bbh_temporal_sequences\": {\n \"alias\"\
: \" - leaderboard_bbh_temporal_sequences\",\n \"acc_norm,none\": 0.308,\n\
\ \"acc_norm_stderr,none\": 0.02925692860650181\n },\n \
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \"\
alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\",\n \
\ \"acc_norm,none\": 0.164,\n \"acc_norm_stderr,none\": 0.02346526100207671\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.14,\n \"acc_norm_stderr,none\": 0.021989409645240245\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.364,\n \"acc_norm_stderr,none\":\
\ 0.030491555220405475\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.29697986577181207,\n\
\ \"acc_norm_stderr,none\": 0.01324978644396749,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.2828282828282828,\n \"acc_norm_stderr,none\": 0.032087795587867514\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.3021978021978022,\n\
\ \"acc_norm_stderr,none\": 0.019670416969439074\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.296875,\n \"acc_norm_stderr,none\"\
: 0.021609729061250887\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.09981515711645102,\n \"prompt_level_strict_acc_stderr,none\": 0.012899331846482735,\n\
\ \"inst_level_strict_acc,none\": 0.1750599520383693,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.12199630314232902,\n \"prompt_level_loose_acc_stderr,none\": 0.014083954960767047,\n\
\ \"inst_level_loose_acc,none\": 0.2038369304556355,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.03625377643504532,\n \"exact_match_stderr,none\"\
: 0.005106903112070287,\n \"alias\": \" - leaderboard_math_hard\"\n \
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.05863192182410423,\n\
\ \"exact_match_stderr,none\": 0.013430311484096614\n },\n \
\ \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\": \"\
\ - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.032520325203252036,\n \"exact_match_stderr,none\": 0.016058998205879745\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\"\
: \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\": 0.007575757575757576,\n\
\ \"exact_match_stderr,none\": 0.007575757575757577\n },\n \
\ \"leaderboard_math_intermediate_algebra_hard\": {\n \"alias\":\
\ \" - leaderboard_math_intermediate_algebra_hard\",\n \"exact_match,none\"\
: 0.007142857142857143,\n \"exact_match_stderr,none\": 0.005041703051390571\n\
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\"\
: \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\": 0.03896103896103896,\n\
\ \"exact_match_stderr,none\": 0.015643720451650286\n },\n \
\ \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.07253886010362694,\n \"exact_match_stderr,none\"\
: 0.018718998520678213\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.022222222222222223,\n \"exact_match_stderr,none\"\
: 0.01273389971505968\n },\n \"leaderboard_mmlu_pro\": {\n \
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.31923204787234044,\n\
\ \"acc_stderr,none\": 0.004250127218302292\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.4060846560846561,\n \"acc_norm_stderr,none\"\
: 0.017523625941839528,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"alias\":\
\ \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\": 0.548,\n\
\ \"acc_norm_stderr,none\": 0.03153986449255664\n },\n \
\ \"leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.3359375,\n \"acc_norm_stderr,none\"\
: 0.029577647634376425\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \
\ \"acc_norm,none\": 0.336,\n \"acc_norm_stderr,none\": 0.02993325909419153\n\
\ }\n },\n \"leaderboard\": {\n \"exact_match,none\": 0.03625377643504532,\n\
\ \"exact_match_stderr,none\": 0.005106903112070287,\n \"acc_norm,none\"\
: 0.45868465430016864,\n \"acc_norm_stderr,none\": 0.0053010760025434655,\n\
\ \"prompt_level_loose_acc,none\": 0.12199630314232902,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.014083954960767047,\n \"inst_level_strict_acc,none\": 0.1750599520383693,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"inst_level_loose_acc,none\"\
: 0.2038369304556355,\n \"inst_level_loose_acc_stderr,none\": \"N/A\",\n\
\ \"prompt_level_strict_acc,none\": 0.09981515711645102,\n \"prompt_level_strict_acc_stderr,none\"\
: 0.012899331846482735,\n \"acc,none\": 0.31923204787234044,\n \"\
acc_stderr,none\": 0.004250127218302292,\n \"alias\": \"leaderboard\"\n \
\ },\n \"leaderboard_bbh\": {\n \"acc_norm,none\": 0.49904530463461205,\n\
\ \"acc_norm_stderr,none\": 0.006124957989583315,\n \"alias\": \"\
\ - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\": {\n\
\ \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\"\
: 0.856,\n \"acc_norm_stderr,none\": 0.022249407735450245\n },\n \"\
leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.6149732620320856,\n \"acc_norm_stderr,none\"\
: 0.03567936280544673\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.636,\n \"acc_norm_stderr,none\": 0.030491555220405475\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.656,\n \"acc_norm_stderr,none\": 0.03010450339231644\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.496,\n \"acc_norm_stderr,none\": 0.0316851985511992\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.456,\n \"acc_norm_stderr,none\": 0.031563285061213475\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.404,\n \"acc_norm_stderr,none\": 0.03109668818482536\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.364,\n \"acc_norm_stderr,none\": 0.030491555220405475\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.692,\n \"acc_norm_stderr,none\": 0.02925692860650181\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.784,\n \"acc_norm_stderr,none\": 0.02607865766373279\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.596,\n \"acc_norm_stderr,none\": 0.03109668818482536\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.38,\n \"acc_norm_stderr,none\": 0.030760116042626098\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.4520547945205479,\n\
\ \"acc_norm_stderr,none\": 0.04133139887430771\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.432,\n \"acc_norm_stderr,none\": 0.03139181076542942\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.456,\n \"acc_norm_stderr,none\": 0.031563285061213475\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.34,\n \"acc_norm_stderr,none\": 0.030020073605457873\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.6292134831460674,\n \"acc_norm_stderr,none\"\
: 0.03630566062020178\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.828,\n \"acc_norm_stderr,none\": 0.02391551394448624\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.308,\n \"acc_norm_stderr,none\": 0.02925692860650181\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.164,\n \"acc_norm_stderr,none\": 0.02346526100207671\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.14,\n \"acc_norm_stderr,none\": 0.021989409645240245\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.364,\n \"acc_norm_stderr,none\": 0.030491555220405475\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.29697986577181207,\n\
\ \"acc_norm_stderr,none\": 0.01324978644396749,\n \"alias\": \" -\
\ leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"alias\"\
: \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.2828282828282828,\n\
\ \"acc_norm_stderr,none\": 0.032087795587867514\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.3021978021978022,\n \"acc_norm_stderr,none\": 0.019670416969439074\n\
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.296875,\n \"acc_norm_stderr,none\": 0.021609729061250887\n\
\ },\n \"leaderboard_ifeval\": {\n \"alias\": \" - leaderboard_ifeval\"\
,\n \"prompt_level_strict_acc,none\": 0.09981515711645102,\n \"prompt_level_strict_acc_stderr,none\"\
: 0.012899331846482735,\n \"inst_level_strict_acc,none\": 0.1750599520383693,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.12199630314232902,\n \"prompt_level_loose_acc_stderr,none\": 0.014083954960767047,\n\
\ \"inst_level_loose_acc,none\": 0.2038369304556355,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\"\n },\n \"leaderboard_math_hard\": {\n \"exact_match,none\"\
: 0.03625377643504532,\n \"exact_match_stderr,none\": 0.005106903112070287,\n\
\ \"alias\": \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.05863192182410423,\n \"exact_match_stderr,none\": 0.013430311484096614\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.032520325203252036,\n \"exact_match_stderr,none\": 0.016058998205879745\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\": \" - leaderboard_math_geometry_hard\"\
,\n \"exact_match,none\": 0.007575757575757576,\n \"exact_match_stderr,none\"\
: 0.007575757575757577\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.007142857142857143,\n \"exact_match_stderr,none\"\
: 0.005041703051390571\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.03896103896103896,\n \"exact_match_stderr,none\": 0.015643720451650286\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.07253886010362694,\n \"exact_match_stderr,none\"\
: 0.018718998520678213\n },\n \"leaderboard_math_precalculus_hard\": {\n \
\ \"alias\": \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\"\
: 0.022222222222222223,\n \"exact_match_stderr,none\": 0.01273389971505968\n\
\ },\n \"leaderboard_mmlu_pro\": {\n \"alias\": \" - leaderboard_mmlu_pro\"\
,\n \"acc,none\": 0.31923204787234044,\n \"acc_stderr,none\": 0.004250127218302292\n\
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.4060846560846561,\n\
\ \"acc_norm_stderr,none\": 0.017523625941839528,\n \"alias\": \"\
\ - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\": {\n \
\ \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\"\
: 0.548,\n \"acc_norm_stderr,none\": 0.03153986449255664\n },\n \"\
leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.3359375,\n \"acc_norm_stderr,none\": 0.029577647634376425\n\
\ },\n \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.336,\n \"acc_norm_stderr,none\": 0.02993325909419153\n\
\ }\n}\n```"
repo_url: https://huggingface.co/theprint/WorldBuilder-12B
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: theprint__WorldBuilder-12B__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_bbh_navigate
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_bbh_snarks
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_gpqa_extended
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_gpqa_main
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_ifeval
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_ifeval_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_mmlu_pro
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_musr_object_placements
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T01-24-40.337241.jsonl'
- config_name: theprint__WorldBuilder-12B__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_20T01_24_40.337241
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T01-24-40.337241.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T01-24-40.337241.jsonl'
---
# Dataset Card for Evaluation run of theprint/WorldBuilder-12B
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [theprint/WorldBuilder-12B](https://huggingface.co/theprint/WorldBuilder-12B)
The dataset is composed of 38 configuration(s), each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the most recent results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/theprint__WorldBuilder-12B-details",
name="theprint__WorldBuilder-12B__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
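To browse the other task configurations before picking one, you can list the configuration names declared for this repository. The sketch below assumes only the `datasets` library and the configuration names shown in the `configs` section above.
```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/theprint__WorldBuilder-12B-details"

# List every per-task configuration declared for this details repository.
configs = get_dataset_config_names(repo)
print(configs)

# Load the "latest" split of another task, e.g. the GPQA diamond samples.
gpqa = load_dataset(
    repo,
    name="theprint__WorldBuilder-12B__leaderboard_gpqa_diamond",
    split="latest",
)
print(gpqa[0].keys())
```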
## Latest results
These are the [latest results from run 2024-11-20T01-24-40.337241](https://huggingface.co/datasets/open-llm-leaderboard/theprint__WorldBuilder-12B-details/blob/main/theprint__WorldBuilder-12B/results_2024-11-20T01-24-40.337241.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and in the "latest" split for each eval):
```python
{
"all": {
"leaderboard": {
"exact_match,none": 0.03625377643504532,
"exact_match_stderr,none": 0.005106903112070287,
"acc_norm,none": 0.45868465430016864,
"acc_norm_stderr,none": 0.0053010760025434655,
"prompt_level_loose_acc,none": 0.12199630314232902,
"prompt_level_loose_acc_stderr,none": 0.014083954960767047,
"inst_level_strict_acc,none": 0.1750599520383693,
"inst_level_strict_acc_stderr,none": "N/A",
"inst_level_loose_acc,none": 0.2038369304556355,
"inst_level_loose_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.09981515711645102,
"prompt_level_strict_acc_stderr,none": 0.012899331846482735,
"acc,none": 0.31923204787234044,
"acc_stderr,none": 0.004250127218302292,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.49904530463461205,
"acc_norm_stderr,none": 0.006124957989583315,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.856,
"acc_norm_stderr,none": 0.022249407735450245
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.6149732620320856,
"acc_norm_stderr,none": 0.03567936280544673
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.636,
"acc_norm_stderr,none": 0.030491555220405475
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.656,
"acc_norm_stderr,none": 0.03010450339231644
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.496,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.456,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.404,
"acc_norm_stderr,none": 0.03109668818482536
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.364,
"acc_norm_stderr,none": 0.030491555220405475
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.692,
"acc_norm_stderr,none": 0.02925692860650181
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.784,
"acc_norm_stderr,none": 0.02607865766373279
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.596,
"acc_norm_stderr,none": 0.03109668818482536
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.38,
"acc_norm_stderr,none": 0.030760116042626098
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.4520547945205479,
"acc_norm_stderr,none": 0.04133139887430771
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.432,
"acc_norm_stderr,none": 0.03139181076542942
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.456,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.34,
"acc_norm_stderr,none": 0.030020073605457873
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.6292134831460674,
"acc_norm_stderr,none": 0.03630566062020178
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.828,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.308,
"acc_norm_stderr,none": 0.02925692860650181
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.164,
"acc_norm_stderr,none": 0.02346526100207671
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.14,
"acc_norm_stderr,none": 0.021989409645240245
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.364,
"acc_norm_stderr,none": 0.030491555220405475
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.29697986577181207,
"acc_norm_stderr,none": 0.01324978644396749,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2828282828282828,
"acc_norm_stderr,none": 0.032087795587867514
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.3021978021978022,
"acc_norm_stderr,none": 0.019670416969439074
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.296875,
"acc_norm_stderr,none": 0.021609729061250887
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.09981515711645102,
"prompt_level_strict_acc_stderr,none": 0.012899331846482735,
"inst_level_strict_acc,none": 0.1750599520383693,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.12199630314232902,
"prompt_level_loose_acc_stderr,none": 0.014083954960767047,
"inst_level_loose_acc,none": 0.2038369304556355,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.03625377643504532,
"exact_match_stderr,none": 0.005106903112070287,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.05863192182410423,
"exact_match_stderr,none": 0.013430311484096614
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.032520325203252036,
"exact_match_stderr,none": 0.016058998205879745
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.007575757575757576,
"exact_match_stderr,none": 0.007575757575757577
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.007142857142857143,
"exact_match_stderr,none": 0.005041703051390571
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.03896103896103896,
"exact_match_stderr,none": 0.015643720451650286
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.07253886010362694,
"exact_match_stderr,none": 0.018718998520678213
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.022222222222222223,
"exact_match_stderr,none": 0.01273389971505968
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.31923204787234044,
"acc_stderr,none": 0.004250127218302292
},
"leaderboard_musr": {
"acc_norm,none": 0.4060846560846561,
"acc_norm_stderr,none": 0.017523625941839528,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.548,
"acc_norm_stderr,none": 0.03153986449255664
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.3359375,
"acc_norm_stderr,none": 0.029577647634376425
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.336,
"acc_norm_stderr,none": 0.02993325909419153
}
},
"leaderboard": {
"exact_match,none": 0.03625377643504532,
"exact_match_stderr,none": 0.005106903112070287,
"acc_norm,none": 0.45868465430016864,
"acc_norm_stderr,none": 0.0053010760025434655,
"prompt_level_loose_acc,none": 0.12199630314232902,
"prompt_level_loose_acc_stderr,none": 0.014083954960767047,
"inst_level_strict_acc,none": 0.1750599520383693,
"inst_level_strict_acc_stderr,none": "N/A",
"inst_level_loose_acc,none": 0.2038369304556355,
"inst_level_loose_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.09981515711645102,
"prompt_level_strict_acc_stderr,none": 0.012899331846482735,
"acc,none": 0.31923204787234044,
"acc_stderr,none": 0.004250127218302292,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.49904530463461205,
"acc_norm_stderr,none": 0.006124957989583315,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.856,
"acc_norm_stderr,none": 0.022249407735450245
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.6149732620320856,
"acc_norm_stderr,none": 0.03567936280544673
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.636,
"acc_norm_stderr,none": 0.030491555220405475
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.656,
"acc_norm_stderr,none": 0.03010450339231644
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.496,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.456,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.404,
"acc_norm_stderr,none": 0.03109668818482536
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.364,
"acc_norm_stderr,none": 0.030491555220405475
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.692,
"acc_norm_stderr,none": 0.02925692860650181
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.784,
"acc_norm_stderr,none": 0.02607865766373279
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.596,
"acc_norm_stderr,none": 0.03109668818482536
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.38,
"acc_norm_stderr,none": 0.030760116042626098
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.4520547945205479,
"acc_norm_stderr,none": 0.04133139887430771
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.432,
"acc_norm_stderr,none": 0.03139181076542942
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.456,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.34,
"acc_norm_stderr,none": 0.030020073605457873
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.6292134831460674,
"acc_norm_stderr,none": 0.03630566062020178
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.828,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.308,
"acc_norm_stderr,none": 0.02925692860650181
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.164,
"acc_norm_stderr,none": 0.02346526100207671
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.14,
"acc_norm_stderr,none": 0.021989409645240245
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.364,
"acc_norm_stderr,none": 0.030491555220405475
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.29697986577181207,
"acc_norm_stderr,none": 0.01324978644396749,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.2828282828282828,
"acc_norm_stderr,none": 0.032087795587867514
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.3021978021978022,
"acc_norm_stderr,none": 0.019670416969439074
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.296875,
"acc_norm_stderr,none": 0.021609729061250887
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.09981515711645102,
"prompt_level_strict_acc_stderr,none": 0.012899331846482735,
"inst_level_strict_acc,none": 0.1750599520383693,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.12199630314232902,
"prompt_level_loose_acc_stderr,none": 0.014083954960767047,
"inst_level_loose_acc,none": 0.2038369304556355,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.03625377643504532,
"exact_match_stderr,none": 0.005106903112070287,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.05863192182410423,
"exact_match_stderr,none": 0.013430311484096614
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.032520325203252036,
"exact_match_stderr,none": 0.016058998205879745
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.007575757575757576,
"exact_match_stderr,none": 0.007575757575757577
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.007142857142857143,
"exact_match_stderr,none": 0.005041703051390571
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.03896103896103896,
"exact_match_stderr,none": 0.015643720451650286
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.07253886010362694,
"exact_match_stderr,none": 0.018718998520678213
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.022222222222222223,
"exact_match_stderr,none": 0.01273389971505968
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.31923204787234044,
"acc_stderr,none": 0.004250127218302292
},
"leaderboard_musr": {
"acc_norm,none": 0.4060846560846561,
"acc_norm_stderr,none": 0.017523625941839528,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.548,
"acc_norm_stderr,none": 0.03153986449255664
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.3359375,
"acc_norm_stderr,none": 0.029577647634376425
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.336,
"acc_norm_stderr,none": 0.02993325909419153
}
}
```
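The same aggregated numbers can also be pulled straight from the results file linked above, without loading any of the per-task configurations. The sketch below uses `huggingface_hub` and the file path taken from that link; later runs will carry a different timestamp, and the exact layout of the JSON may differ slightly from the summary printed here.
```python
import json
from huggingface_hub import hf_hub_download

# Download the aggregated results file for this run (path taken from the link above).
path = hf_hub_download(
    repo_id="open-llm-leaderboard/theprint__WorldBuilder-12B-details",
    filename="theprint__WorldBuilder-12B/results_2024-11-20T01-24-40.337241.json",
    repo_type="dataset",
)

with open(path) as f:
    results = json.load(f)

# Inspect the top-level structure before drilling into individual metrics.
print(sorted(results.keys()))
```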
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5-details | open-llm-leaderboard | "2024-11-20T01:43:20Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T01:39:51Z" | ---
pretty_name: Evaluation run of jeffmeloy/Qwen2.5-7B-nerd-uncensored-v1.5
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [jeffmeloy/Qwen2.5-7B-nerd-uncensored-v1.5](https://huggingface.co/jeffmeloy/Qwen2.5-7B-nerd-uncensored-v1.5)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5-details\"\
,\n\tname=\"jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-11-20T01-39-50.321433](https://huggingface.co/datasets/open-llm-leaderboard/jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5-details/blob/main/jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5/results_2024-11-20T01-39-50.321433.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"prompt_level_strict_acc,none\": 0.5101663585951941,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.021512126001350697,\n \"\
acc,none\": 0.44481382978723405,\n \"acc_stderr,none\": 0.004530619574000465,\n\
\ \"acc_norm,none\": 0.5103126216111039,\n \"acc_norm_stderr,none\"\
: 0.005402586149069291,\n \"inst_level_loose_acc,none\": 0.6618705035971223,\n\
\ \"inst_level_loose_acc_stderr,none\": \"N/A\",\n \"inst_level_strict_acc,none\"\
: 0.6199040767386091,\n \"inst_level_strict_acc_stderr,none\": \"N/A\"\
,\n \"exact_match,none\": 0.22280966767371602,\n \"exact_match_stderr,none\"\
: 0.010883975817751943,\n \"prompt_level_loose_acc,none\": 0.5637707948243993,\n\
\ \"prompt_level_loose_acc_stderr,none\": 0.021340853089940322,\n \
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n\
\ \"acc_norm,none\": 0.5499045304634612,\n \"acc_norm_stderr,none\"\
: 0.006226637172183432,\n \"alias\": \" - leaderboard_bbh\"\n \
\ },\n \"leaderboard_bbh_boolean_expressions\": {\n \"alias\"\
: \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\": 0.816,\n\
\ \"acc_norm_stderr,none\": 0.02455581299422255\n },\n \
\ \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.5935828877005348,\n \"acc_norm_stderr,none\"\
: 0.036013904358574385\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_date_understanding\",\n \
\ \"acc_norm,none\": 0.592,\n \"acc_norm_stderr,none\": 0.03114520984654851\n\
\ },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\": 0.66,\n\
\ \"acc_norm_stderr,none\": 0.030020073605457876\n },\n \
\ \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.584,\n \"acc_norm_stderr,none\":\
\ 0.031235856237014505\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\",\n \
\ \"acc_norm,none\": 0.552,\n \"acc_norm_stderr,none\": 0.03151438761115348\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \"\
\ - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\": 0.584,\n \
\ \"acc_norm_stderr,none\": 0.031235856237014505\n },\n \"\
leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\": \" \
\ - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n },\n\
\ \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\",\n \"\
acc_norm,none\": 0.44,\n \"acc_norm_stderr,none\": 0.03145724452223569\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n\
\ \"acc_norm,none\": 0.78,\n \"acc_norm_stderr,none\": 0.02625179282460579\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"acc_norm,none\"\
: 0.652,\n \"acc_norm_stderr,none\": 0.030186568464511673\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.652,\n \"acc_norm_stderr,none\":\
\ 0.030186568464511673\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"alias\": \" - leaderboard_bbh_object_counting\",\n \
\ \"acc_norm,none\": 0.44,\n \"acc_norm_stderr,none\": 0.03145724452223569\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
alias\": \" - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\"\
: 0.5616438356164384,\n \"acc_norm_stderr,none\": 0.04120596186613957\n\
\ },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\",\n\
\ \"acc_norm,none\": 0.616,\n \"acc_norm_stderr,none\": 0.030821679117375447\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\": 0.524,\n \
\ \"acc_norm_stderr,none\": 0.03164968895968774\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\
,\n \"acc_norm,none\": 0.532,\n \"acc_norm_stderr,none\":\
\ 0.031621252575725574\n },\n \"leaderboard_bbh_snarks\": {\n \
\ \"alias\": \" - leaderboard_bbh_snarks\",\n \"acc_norm,none\"\
: 0.6910112359550562,\n \"acc_norm_stderr,none\": 0.0347317978779636\n\
\ },\n \"leaderboard_bbh_sports_understanding\": {\n \"\
alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.732,\n \"acc_norm_stderr,none\": 0.02806876238252672\n },\n\
\ \"leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" -\
\ leaderboard_bbh_temporal_sequences\",\n \"acc_norm,none\": 0.592,\n\
\ \"acc_norm_stderr,none\": 0.03114520984654851\n },\n \
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \"\
alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\",\n \
\ \"acc_norm,none\": 0.208,\n \"acc_norm_stderr,none\": 0.02572139890141637\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.176,\n \"acc_norm_stderr,none\":\
\ 0.024133497525457123\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.276,\n \"acc_norm_stderr,none\":\
\ 0.02832853727421142\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.512,\n \"acc_norm_stderr,none\": 0.03167708558254714\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.3271812080536913,\n\
\ \"acc_norm_stderr,none\": 0.01359263351617689,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.37373737373737376,\n \"acc_norm_stderr,none\": 0.03446897738659336\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.3131868131868132,\n\
\ \"acc_norm_stderr,none\": 0.01986656558013767\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.3236607142857143,\n \"acc_norm_stderr,none\"\
: 0.022129598768200833\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.5101663585951941,\n \"prompt_level_strict_acc_stderr,none\": 0.021512126001350697,\n\
\ \"inst_level_strict_acc,none\": 0.6199040767386091,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.5637707948243993,\n \"prompt_level_loose_acc_stderr,none\": 0.021340853089940322,\n\
\ \"inst_level_loose_acc,none\": 0.6618705035971223,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.22280966767371602,\n \"exact_match_stderr,none\"\
: 0.010883975817751943,\n \"alias\": \" - leaderboard_math_hard\"\n \
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.41368078175895767,\n\
\ \"exact_match_stderr,none\": 0.028153929859671776\n },\n \
\ \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\": \"\
\ - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.18699186991869918,\n \"exact_match_stderr,none\": 0.03530034023230448\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\"\
: \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\": 0.10606060606060606,\n\
\ \"exact_match_stderr,none\": 0.026902673595485146\n },\n \
\ \"leaderboard_math_intermediate_algebra_hard\": {\n \"alias\":\
\ \" - leaderboard_math_intermediate_algebra_hard\",\n \"exact_match,none\"\
: 0.09285714285714286,\n \"exact_match_stderr,none\": 0.01737574359591369\n\
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\"\
: \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\": 0.2012987012987013,\n\
\ \"exact_match_stderr,none\": 0.03241656299738023\n },\n \
\ \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.32642487046632124,\n \"exact_match_stderr,none\"\
: 0.03384028621143298\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.08148148148148149,\n \"exact_match_stderr,none\"\
: 0.023633117677786884\n },\n \"leaderboard_mmlu_pro\": {\n \
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.44481382978723405,\n\
\ \"acc_stderr,none\": 0.004530619574000465\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.4973544973544973,\n \"acc_norm_stderr,none\"\
: 0.018006570007816334,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"alias\":\
\ \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\": 0.556,\n\
\ \"acc_norm_stderr,none\": 0.03148684942554571\n },\n \
\ \"leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.390625,\n \"acc_norm_stderr,none\"\
: 0.030552886284181364\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \
\ \"acc_norm,none\": 0.548,\n \"acc_norm_stderr,none\": 0.03153986449255664\n\
\ }\n },\n \"leaderboard\": {\n \"prompt_level_strict_acc,none\"\
: 0.5101663585951941,\n \"prompt_level_strict_acc_stderr,none\": 0.021512126001350697,\n\
\ \"acc,none\": 0.44481382978723405,\n \"acc_stderr,none\": 0.004530619574000465,\n\
\ \"acc_norm,none\": 0.5103126216111039,\n \"acc_norm_stderr,none\"\
: 0.005402586149069291,\n \"inst_level_loose_acc,none\": 0.6618705035971223,\n\
\ \"inst_level_loose_acc_stderr,none\": \"N/A\",\n \"inst_level_strict_acc,none\"\
: 0.6199040767386091,\n \"inst_level_strict_acc_stderr,none\": \"N/A\",\n\
\ \"exact_match,none\": 0.22280966767371602,\n \"exact_match_stderr,none\"\
: 0.010883975817751943,\n \"prompt_level_loose_acc,none\": 0.5637707948243993,\n\
\ \"prompt_level_loose_acc_stderr,none\": 0.021340853089940322,\n \
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \"acc_norm,none\"\
: 0.5499045304634612,\n \"acc_norm_stderr,none\": 0.006226637172183432,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\",\n \"\
acc_norm,none\": 0.816,\n \"acc_norm_stderr,none\": 0.02455581299422255\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.5935828877005348,\n \"acc_norm_stderr,none\"\
: 0.036013904358574385\n },\n \"leaderboard_bbh_date_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.592,\n \"acc_norm_stderr,none\": 0.03114520984654851\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.66,\n \"acc_norm_stderr,none\": 0.030020073605457876\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.584,\n \"acc_norm_stderr,none\": 0.031235856237014505\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.552,\n \"acc_norm_stderr,none\": 0.03151438761115348\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.584,\n \"acc_norm_stderr,none\": 0.031235856237014505\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.44,\n \"acc_norm_stderr,none\": 0.03145724452223569\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.78,\n \"acc_norm_stderr,none\": 0.02625179282460579\n },\n \"leaderboard_bbh_movie_recommendation\"\
: {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"\
acc_norm,none\": 0.652,\n \"acc_norm_stderr,none\": 0.030186568464511673\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.652,\n \"acc_norm_stderr,none\": 0.030186568464511673\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.44,\n \"acc_norm_stderr,none\": 0.03145724452223569\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.5616438356164384,\n\
\ \"acc_norm_stderr,none\": 0.04120596186613957\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.616,\n \"acc_norm_stderr,none\": 0.030821679117375447\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.524,\n \"acc_norm_stderr,none\": 0.03164968895968774\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.532,\n \"acc_norm_stderr,none\": 0.031621252575725574\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.6910112359550562,\n \"acc_norm_stderr,none\"\
: 0.0347317978779636\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.732,\n \"acc_norm_stderr,none\": 0.02806876238252672\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.592,\n \"acc_norm_stderr,none\": 0.03114520984654851\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.208,\n \"acc_norm_stderr,none\": 0.02572139890141637\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.176,\n \"acc_norm_stderr,none\": 0.024133497525457123\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.276,\n \"acc_norm_stderr,none\": 0.02832853727421142\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.512,\n \"acc_norm_stderr,none\": 0.03167708558254714\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.3271812080536913,\n\
\ \"acc_norm_stderr,none\": 0.01359263351617689,\n \"alias\": \" -\
\ leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"alias\"\
: \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.37373737373737376,\n\
\ \"acc_norm_stderr,none\": 0.03446897738659336\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.3131868131868132,\n \"acc_norm_stderr,none\": 0.01986656558013767\n \
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.3236607142857143,\n \"acc_norm_stderr,none\"\
: 0.022129598768200833\n },\n \"leaderboard_ifeval\": {\n \"alias\"\
: \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.5101663585951941,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.021512126001350697,\n \
\ \"inst_level_strict_acc,none\": 0.6199040767386091,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.5637707948243993,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.021340853089940322,\n \"inst_level_loose_acc,none\"\
: 0.6618705035971223,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n \
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.22280966767371602,\n\
\ \"exact_match_stderr,none\": 0.010883975817751943,\n \"alias\":\
\ \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\": {\n\
\ \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.41368078175895767,\n \"exact_match_stderr,none\": 0.028153929859671776\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.18699186991869918,\n \"exact_match_stderr,none\": 0.03530034023230448\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\": \" - leaderboard_math_geometry_hard\"\
,\n \"exact_match,none\": 0.10606060606060606,\n \"exact_match_stderr,none\"\
: 0.026902673595485146\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.09285714285714286,\n \"exact_match_stderr,none\"\
: 0.01737574359591369\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.2012987012987013,\n \"exact_match_stderr,none\": 0.03241656299738023\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.32642487046632124,\n \"exact_match_stderr,none\"\
: 0.03384028621143298\n },\n \"leaderboard_math_precalculus_hard\": {\n \
\ \"alias\": \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\"\
: 0.08148148148148149,\n \"exact_match_stderr,none\": 0.023633117677786884\n\
\ },\n \"leaderboard_mmlu_pro\": {\n \"alias\": \" - leaderboard_mmlu_pro\"\
,\n \"acc,none\": 0.44481382978723405,\n \"acc_stderr,none\": 0.004530619574000465\n\
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.4973544973544973,\n\
\ \"acc_norm_stderr,none\": 0.018006570007816334,\n \"alias\": \"\
\ - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\": {\n \
\ \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\"\
: 0.556,\n \"acc_norm_stderr,none\": 0.03148684942554571\n },\n \"\
leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.390625,\n \"acc_norm_stderr,none\": 0.030552886284181364\n\
\ },\n \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.548,\n \"acc_norm_stderr,none\": 0.03153986449255664\n\
\ }\n}\n```"
repo_url: https://huggingface.co/jeffmeloy/Qwen2.5-7B-nerd-uncensored-v1.5
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_bbh_navigate
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_bbh_snarks
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_gpqa_extended
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_gpqa_main
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_ifeval
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_ifeval_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_mmlu_pro
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_musr_object_placements
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T01-39-50.321433.jsonl'
- config_name: jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_20T01_39_50.321433
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T01-39-50.321433.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T01-39-50.321433.jsonl'
---
# Dataset Card for Evaluation run of jeffmeloy/Qwen2.5-7B-nerd-uncensored-v1.5
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [jeffmeloy/Qwen2.5-7B-nerd-uncensored-v1.5](https://huggingface.co/jeffmeloy/Qwen2.5-7B-nerd-uncensored-v1.5).
The dataset is composed of 38 configuration(s), each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5-details",
name="jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
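The aggregated results mentioned above live in their own configuration. As a minimal sketch (the exact `name` is an assumption; the usual open-llm-leaderboard convention is a `...__results` suffix, which is not listed in the configs above), loading them would look like:
```python
from datasets import load_dataset

# Hypothetical config name following the usual "...__results" convention;
# check the dataset viewer if this name does not resolve.
results = load_dataset(
    "open-llm-leaderboard/jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5-details",
    name="jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5__results",
    split="latest",
)
print(results[0])  # one row per run, holding the aggregated scores
```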
## Latest results
These are the [latest results from run 2024-11-20T01-39-50.321433](https://huggingface.co/datasets/open-llm-leaderboard/jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5-details/blob/main/jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5/results_2024-11-20T01-39-50.321433.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each of them in the "results" configuration and in the "latest" split of each eval):
```python
{
"all": {
"leaderboard": {
"prompt_level_strict_acc,none": 0.5101663585951941,
"prompt_level_strict_acc_stderr,none": 0.021512126001350697,
"acc,none": 0.44481382978723405,
"acc_stderr,none": 0.004530619574000465,
"acc_norm,none": 0.5103126216111039,
"acc_norm_stderr,none": 0.005402586149069291,
"inst_level_loose_acc,none": 0.6618705035971223,
"inst_level_loose_acc_stderr,none": "N/A",
"inst_level_strict_acc,none": 0.6199040767386091,
"inst_level_strict_acc_stderr,none": "N/A",
"exact_match,none": 0.22280966767371602,
"exact_match_stderr,none": 0.010883975817751943,
"prompt_level_loose_acc,none": 0.5637707948243993,
"prompt_level_loose_acc_stderr,none": 0.021340853089940322,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.5499045304634612,
"acc_norm_stderr,none": 0.006226637172183432,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.816,
"acc_norm_stderr,none": 0.02455581299422255
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5935828877005348,
"acc_norm_stderr,none": 0.036013904358574385
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.592,
"acc_norm_stderr,none": 0.03114520984654851
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.66,
"acc_norm_stderr,none": 0.030020073605457876
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.584,
"acc_norm_stderr,none": 0.031235856237014505
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.03151438761115348
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.584,
"acc_norm_stderr,none": 0.031235856237014505
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.44,
"acc_norm_stderr,none": 0.03145724452223569
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.78,
"acc_norm_stderr,none": 0.02625179282460579
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.652,
"acc_norm_stderr,none": 0.030186568464511673
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.652,
"acc_norm_stderr,none": 0.030186568464511673
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.44,
"acc_norm_stderr,none": 0.03145724452223569
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.5616438356164384,
"acc_norm_stderr,none": 0.04120596186613957
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.616,
"acc_norm_stderr,none": 0.030821679117375447
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.524,
"acc_norm_stderr,none": 0.03164968895968774
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.532,
"acc_norm_stderr,none": 0.031621252575725574
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.6910112359550562,
"acc_norm_stderr,none": 0.0347317978779636
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.732,
"acc_norm_stderr,none": 0.02806876238252672
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.592,
"acc_norm_stderr,none": 0.03114520984654851
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.208,
"acc_norm_stderr,none": 0.02572139890141637
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.176,
"acc_norm_stderr,none": 0.024133497525457123
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.276,
"acc_norm_stderr,none": 0.02832853727421142
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.512,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.3271812080536913,
"acc_norm_stderr,none": 0.01359263351617689,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.37373737373737376,
"acc_norm_stderr,none": 0.03446897738659336
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.3131868131868132,
"acc_norm_stderr,none": 0.01986656558013767
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.3236607142857143,
"acc_norm_stderr,none": 0.022129598768200833
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.5101663585951941,
"prompt_level_strict_acc_stderr,none": 0.021512126001350697,
"inst_level_strict_acc,none": 0.6199040767386091,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.5637707948243993,
"prompt_level_loose_acc_stderr,none": 0.021340853089940322,
"inst_level_loose_acc,none": 0.6618705035971223,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.22280966767371602,
"exact_match_stderr,none": 0.010883975817751943,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.41368078175895767,
"exact_match_stderr,none": 0.028153929859671776
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.18699186991869918,
"exact_match_stderr,none": 0.03530034023230448
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.10606060606060606,
"exact_match_stderr,none": 0.026902673595485146
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.09285714285714286,
"exact_match_stderr,none": 0.01737574359591369
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.2012987012987013,
"exact_match_stderr,none": 0.03241656299738023
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.32642487046632124,
"exact_match_stderr,none": 0.03384028621143298
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.08148148148148149,
"exact_match_stderr,none": 0.023633117677786884
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.44481382978723405,
"acc_stderr,none": 0.004530619574000465
},
"leaderboard_musr": {
"acc_norm,none": 0.4973544973544973,
"acc_norm_stderr,none": 0.018006570007816334,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.556,
"acc_norm_stderr,none": 0.03148684942554571
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.390625,
"acc_norm_stderr,none": 0.030552886284181364
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.548,
"acc_norm_stderr,none": 0.03153986449255664
}
},
"leaderboard": {
"prompt_level_strict_acc,none": 0.5101663585951941,
"prompt_level_strict_acc_stderr,none": 0.021512126001350697,
"acc,none": 0.44481382978723405,
"acc_stderr,none": 0.004530619574000465,
"acc_norm,none": 0.5103126216111039,
"acc_norm_stderr,none": 0.005402586149069291,
"inst_level_loose_acc,none": 0.6618705035971223,
"inst_level_loose_acc_stderr,none": "N/A",
"inst_level_strict_acc,none": 0.6199040767386091,
"inst_level_strict_acc_stderr,none": "N/A",
"exact_match,none": 0.22280966767371602,
"exact_match_stderr,none": 0.010883975817751943,
"prompt_level_loose_acc,none": 0.5637707948243993,
"prompt_level_loose_acc_stderr,none": 0.021340853089940322,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.5499045304634612,
"acc_norm_stderr,none": 0.006226637172183432,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.816,
"acc_norm_stderr,none": 0.02455581299422255
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.5935828877005348,
"acc_norm_stderr,none": 0.036013904358574385
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.592,
"acc_norm_stderr,none": 0.03114520984654851
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.66,
"acc_norm_stderr,none": 0.030020073605457876
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.584,
"acc_norm_stderr,none": 0.031235856237014505
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.03151438761115348
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.584,
"acc_norm_stderr,none": 0.031235856237014505
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.44,
"acc_norm_stderr,none": 0.03145724452223569
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.78,
"acc_norm_stderr,none": 0.02625179282460579
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.652,
"acc_norm_stderr,none": 0.030186568464511673
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.652,
"acc_norm_stderr,none": 0.030186568464511673
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.44,
"acc_norm_stderr,none": 0.03145724452223569
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.5616438356164384,
"acc_norm_stderr,none": 0.04120596186613957
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.616,
"acc_norm_stderr,none": 0.030821679117375447
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.524,
"acc_norm_stderr,none": 0.03164968895968774
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.532,
"acc_norm_stderr,none": 0.031621252575725574
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.6910112359550562,
"acc_norm_stderr,none": 0.0347317978779636
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.732,
"acc_norm_stderr,none": 0.02806876238252672
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.592,
"acc_norm_stderr,none": 0.03114520984654851
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.208,
"acc_norm_stderr,none": 0.02572139890141637
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.176,
"acc_norm_stderr,none": 0.024133497525457123
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.276,
"acc_norm_stderr,none": 0.02832853727421142
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.512,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.3271812080536913,
"acc_norm_stderr,none": 0.01359263351617689,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.37373737373737376,
"acc_norm_stderr,none": 0.03446897738659336
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.3131868131868132,
"acc_norm_stderr,none": 0.01986656558013767
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.3236607142857143,
"acc_norm_stderr,none": 0.022129598768200833
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.5101663585951941,
"prompt_level_strict_acc_stderr,none": 0.021512126001350697,
"inst_level_strict_acc,none": 0.6199040767386091,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.5637707948243993,
"prompt_level_loose_acc_stderr,none": 0.021340853089940322,
"inst_level_loose_acc,none": 0.6618705035971223,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.22280966767371602,
"exact_match_stderr,none": 0.010883975817751943,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.41368078175895767,
"exact_match_stderr,none": 0.028153929859671776
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.18699186991869918,
"exact_match_stderr,none": 0.03530034023230448
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.10606060606060606,
"exact_match_stderr,none": 0.026902673595485146
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.09285714285714286,
"exact_match_stderr,none": 0.01737574359591369
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.2012987012987013,
"exact_match_stderr,none": 0.03241656299738023
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.32642487046632124,
"exact_match_stderr,none": 0.03384028621143298
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.08148148148148149,
"exact_match_stderr,none": 0.023633117677786884
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.44481382978723405,
"acc_stderr,none": 0.004530619574000465
},
"leaderboard_musr": {
"acc_norm,none": 0.4973544973544973,
"acc_norm_stderr,none": 0.018006570007816334,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.556,
"acc_norm_stderr,none": 0.03148684942554571
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.390625,
"acc_norm_stderr,none": 0.030552886284181364
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.548,
"acc_norm_stderr,none": 0.03153986449255664
}
}
```
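To work with these aggregated numbers programmatically rather than copying them from the card, one option (a sketch, assuming `huggingface_hub` is installed) is to download the results file linked above and parse it as JSON:
```python
import json

from huggingface_hub import hf_hub_download

# Repo and file path taken from the "Latest results" link above.
path = hf_hub_download(
    repo_id="open-llm-leaderboard/jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5-details",
    filename="jeffmeloy__Qwen2.5-7B-nerd-uncensored-v1.5/results_2024-11-20T01-39-50.321433.json",
    repo_type="dataset",
)
with open(path) as f:
    results = json.load(f)

# Key layout assumed to match the excerpt printed above.
print(results.get("all", results).get("leaderboard_bbh"))
```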
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
bitmind/google-image-scraper___0-to-1000___animagine-xl-3.1 | bitmind | "2024-11-20T01:52:03Z" | 9 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T01:51:59Z" | ---
dataset_info:
features:
- name: image
dtype: image
splits:
- name: train
num_bytes: 102906599.779
num_examples: 1001
download_size: 102945280
dataset_size: 102906599.779
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
BBChicago/llm-router-test | BBChicago | "2024-11-20T04:35:46Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T02:08:34Z" | ---
dataset_info:
features:
- name: prompt
dtype: string
- name: label
dtype: int64
- name: year
dtype: int64
- name: month
dtype: int64
- name: day
dtype: int64
- name: hour
dtype: int64
- name: hour_sin
dtype: float64
- name: hour_cos
dtype: float64
- name: weekday
dtype: int64
- name: weekday_sin
dtype: float64
- name: weekday_cos
dtype: float64
- name: minute
dtype: int64
- name: second
dtype: int64
- name: is_weekend
dtype: int64
- name: quarter
dtype: int64
- name: user_name_embd_0
dtype: float64
- name: user_name_embd_1
dtype: float64
- name: user_name_embd_2
dtype: float64
- name: user_name_embd_3
dtype: float64
- name: user_name_embd_4
dtype: float64
- name: user_name_embd_5
dtype: float64
- name: user_name_embd_6
dtype: float64
- name: user_name_embd_7
dtype: float64
- name: user_name_embd_8
dtype: float64
- name: user_name_embd_9
dtype: float64
- name: user_name_embd_10
dtype: float64
- name: user_name_embd_11
dtype: float64
- name: user_name_embd_12
dtype: float64
- name: user_name_embd_13
dtype: float64
- name: user_name_embd_14
dtype: float64
- name: user_name_embd_15
dtype: float64
- name: user_name_embd_16
dtype: float64
- name: user_name_embd_17
dtype: float64
- name: user_name_embd_18
dtype: float64
- name: user_name_embd_19
dtype: float64
- name: user_name_embd_20
dtype: float64
- name: user_name_embd_21
dtype: float64
- name: user_name_embd_22
dtype: float64
- name: user_name_embd_23
dtype: float64
- name: user_name_embd_24
dtype: float64
- name: user_name_embd_25
dtype: float64
- name: user_name_embd_26
dtype: float64
- name: user_name_embd_27
dtype: float64
- name: user_name_embd_28
dtype: float64
- name: user_name_embd_29
dtype: float64
- name: user_name_embd_30
dtype: float64
- name: user_name_embd_31
dtype: float64
- name: user_name_embd_32
dtype: float64
- name: user_name_embd_33
dtype: float64
- name: user_name_embd_34
dtype: float64
- name: user_name_embd_35
dtype: float64
- name: user_name_embd_36
dtype: float64
- name: user_name_embd_37
dtype: float64
- name: user_name_embd_38
dtype: float64
- name: user_name_embd_39
dtype: float64
- name: user_name_embd_40
dtype: float64
- name: user_name_embd_41
dtype: float64
- name: user_name_embd_42
dtype: float64
- name: user_name_embd_43
dtype: float64
- name: user_name_embd_44
dtype: float64
- name: user_name_embd_45
dtype: float64
- name: user_name_embd_46
dtype: float64
- name: user_name_embd_47
dtype: float64
- name: user_name_embd_48
dtype: float64
- name: user_name_embd_49
dtype: float64
- name: user_name_embd_50
dtype: float64
- name: user_name_embd_51
dtype: float64
- name: user_name_embd_52
dtype: float64
- name: user_name_embd_53
dtype: float64
- name: user_name_embd_54
dtype: float64
- name: user_name_embd_55
dtype: float64
- name: user_name_embd_56
dtype: float64
- name: user_name_embd_57
dtype: float64
- name: user_name_embd_58
dtype: float64
- name: user_name_embd_59
dtype: float64
- name: user_name_embd_60
dtype: float64
- name: user_name_embd_61
dtype: float64
- name: user_name_embd_62
dtype: float64
- name: user_name_embd_63
dtype: float64
- name: user_name_embd_64
dtype: float64
- name: user_name_embd_65
dtype: float64
- name: user_name_embd_66
dtype: float64
- name: user_name_embd_67
dtype: float64
- name: user_name_embd_68
dtype: float64
- name: user_name_embd_69
dtype: float64
- name: user_name_embd_70
dtype: float64
- name: user_name_embd_71
dtype: float64
- name: user_name_embd_72
dtype: float64
- name: user_name_embd_73
dtype: float64
- name: user_name_embd_74
dtype: float64
- name: user_name_embd_75
dtype: float64
- name: user_name_embd_76
dtype: float64
- name: user_name_embd_77
dtype: float64
- name: user_name_embd_78
dtype: float64
- name: user_name_embd_79
dtype: float64
- name: user_name_embd_80
dtype: float64
- name: user_name_embd_81
dtype: float64
- name: user_name_embd_82
dtype: float64
- name: user_name_embd_83
dtype: float64
- name: user_name_embd_84
dtype: float64
- name: user_name_embd_85
dtype: float64
- name: user_name_embd_86
dtype: float64
- name: user_name_embd_87
dtype: float64
- name: user_name_embd_88
dtype: float64
- name: user_name_embd_89
dtype: float64
- name: user_name_embd_90
dtype: float64
- name: user_name_embd_91
dtype: float64
- name: user_name_embd_92
dtype: float64
- name: user_name_embd_93
dtype: float64
- name: user_name_embd_94
dtype: float64
- name: user_name_embd_95
dtype: float64
- name: user_name_embd_96
dtype: float64
- name: user_name_embd_97
dtype: float64
- name: user_name_embd_98
dtype: float64
- name: user_name_embd_99
dtype: float64
- name: user_name_embd_100
dtype: float64
- name: user_name_embd_101
dtype: float64
- name: user_name_embd_102
dtype: float64
- name: user_name_embd_103
dtype: float64
- name: user_name_embd_104
dtype: float64
- name: user_name_embd_105
dtype: float64
- name: user_name_embd_106
dtype: float64
- name: user_name_embd_107
dtype: float64
- name: user_name_embd_108
dtype: float64
- name: user_name_embd_109
dtype: float64
- name: user_name_embd_110
dtype: float64
- name: user_name_embd_111
dtype: float64
- name: user_name_embd_112
dtype: float64
- name: user_name_embd_113
dtype: float64
- name: user_name_embd_114
dtype: float64
- name: user_name_embd_115
dtype: float64
- name: user_name_embd_116
dtype: float64
- name: user_name_embd_117
dtype: float64
- name: user_name_embd_118
dtype: float64
- name: user_name_embd_119
dtype: float64
- name: user_name_embd_120
dtype: float64
- name: user_name_embd_121
dtype: float64
- name: user_name_embd_122
dtype: float64
- name: user_name_embd_123
dtype: float64
- name: user_name_embd_124
dtype: float64
- name: user_name_embd_125
dtype: float64
- name: user_name_embd_126
dtype: float64
- name: user_name_embd_127
dtype: float64
- name: user_name_embd_128
dtype: float64
- name: user_name_embd_129
dtype: float64
- name: user_name_embd_130
dtype: float64
- name: user_name_embd_131
dtype: float64
- name: user_name_embd_132
dtype: float64
- name: user_name_embd_133
dtype: float64
- name: user_name_embd_134
dtype: float64
- name: user_name_embd_135
dtype: float64
- name: user_name_embd_136
dtype: float64
- name: user_name_embd_137
dtype: float64
- name: user_name_embd_138
dtype: float64
- name: user_name_embd_139
dtype: float64
- name: user_name_embd_140
dtype: float64
- name: user_name_embd_141
dtype: float64
- name: user_name_embd_142
dtype: float64
- name: user_name_embd_143
dtype: float64
- name: user_name_embd_144
dtype: float64
- name: user_name_embd_145
dtype: float64
- name: user_name_embd_146
dtype: float64
- name: user_name_embd_147
dtype: float64
- name: user_name_embd_148
dtype: float64
- name: user_name_embd_149
dtype: float64
- name: user_name_embd_150
dtype: float64
- name: user_name_embd_151
dtype: float64
- name: user_name_embd_152
dtype: float64
- name: user_name_embd_153
dtype: float64
- name: user_name_embd_154
dtype: float64
- name: user_name_embd_155
dtype: float64
- name: user_name_embd_156
dtype: float64
- name: user_name_embd_157
dtype: float64
- name: user_name_embd_158
dtype: float64
- name: user_name_embd_159
dtype: float64
- name: user_name_embd_160
dtype: float64
- name: user_name_embd_161
dtype: float64
- name: user_name_embd_162
dtype: float64
- name: user_name_embd_163
dtype: float64
- name: user_name_embd_164
dtype: float64
- name: user_name_embd_165
dtype: float64
- name: user_name_embd_166
dtype: float64
- name: user_name_embd_167
dtype: float64
- name: user_name_embd_168
dtype: float64
- name: user_name_embd_169
dtype: float64
- name: user_name_embd_170
dtype: float64
- name: user_name_embd_171
dtype: float64
- name: user_name_embd_172
dtype: float64
- name: user_name_embd_173
dtype: float64
- name: user_name_embd_174
dtype: float64
- name: user_name_embd_175
dtype: float64
- name: user_name_embd_176
dtype: float64
- name: user_name_embd_177
dtype: float64
- name: user_name_embd_178
dtype: float64
- name: user_name_embd_179
dtype: float64
- name: user_name_embd_180
dtype: float64
- name: user_name_embd_181
dtype: float64
- name: user_name_embd_182
dtype: float64
- name: user_name_embd_183
dtype: float64
- name: user_name_embd_184
dtype: float64
- name: user_name_embd_185
dtype: float64
- name: user_name_embd_186
dtype: float64
- name: user_name_embd_187
dtype: float64
- name: user_name_embd_188
dtype: float64
- name: user_name_embd_189
dtype: float64
- name: user_name_embd_190
dtype: float64
- name: user_name_embd_191
dtype: float64
- name: user_name_embd_192
dtype: float64
- name: user_name_embd_193
dtype: float64
- name: user_name_embd_194
dtype: float64
- name: user_name_embd_195
dtype: float64
- name: user_name_embd_196
dtype: float64
- name: user_name_embd_197
dtype: float64
- name: user_name_embd_198
dtype: float64
- name: user_name_embd_199
dtype: float64
- name: user_name_embd_200
dtype: float64
- name: user_name_embd_201
dtype: float64
- name: user_name_embd_202
dtype: float64
- name: user_name_embd_203
dtype: float64
- name: user_name_embd_204
dtype: float64
- name: user_name_embd_205
dtype: float64
- name: user_name_embd_206
dtype: float64
- name: user_name_embd_207
dtype: float64
- name: user_name_embd_208
dtype: float64
- name: user_name_embd_209
dtype: float64
- name: user_name_embd_210
dtype: float64
- name: user_name_embd_211
dtype: float64
- name: user_name_embd_212
dtype: float64
- name: user_name_embd_213
dtype: float64
- name: user_name_embd_214
dtype: float64
- name: user_name_embd_215
dtype: float64
- name: user_name_embd_216
dtype: float64
- name: user_name_embd_217
dtype: float64
- name: user_name_embd_218
dtype: float64
- name: user_name_embd_219
dtype: float64
- name: user_name_embd_220
dtype: float64
- name: user_name_embd_221
dtype: float64
- name: user_name_embd_222
dtype: float64
- name: user_name_embd_223
dtype: float64
- name: user_name_embd_224
dtype: float64
- name: user_name_embd_225
dtype: float64
- name: user_name_embd_226
dtype: float64
- name: user_name_embd_227
dtype: float64
- name: user_name_embd_228
dtype: float64
- name: user_name_embd_229
dtype: float64
- name: user_name_embd_230
dtype: float64
- name: user_name_embd_231
dtype: float64
- name: user_name_embd_232
dtype: float64
- name: user_name_embd_233
dtype: float64
- name: user_name_embd_234
dtype: float64
- name: user_name_embd_235
dtype: float64
- name: user_name_embd_236
dtype: float64
- name: user_name_embd_237
dtype: float64
- name: user_name_embd_238
dtype: float64
- name: user_name_embd_239
dtype: float64
- name: user_name_embd_240
dtype: float64
- name: user_name_embd_241
dtype: float64
- name: user_name_embd_242
dtype: float64
- name: user_name_embd_243
dtype: float64
- name: user_name_embd_244
dtype: float64
- name: user_name_embd_245
dtype: float64
- name: user_name_embd_246
dtype: float64
- name: user_name_embd_247
dtype: float64
- name: user_name_embd_248
dtype: float64
- name: user_name_embd_249
dtype: float64
- name: user_name_embd_250
dtype: float64
- name: user_name_embd_251
dtype: float64
- name: user_name_embd_252
dtype: float64
- name: user_name_embd_253
dtype: float64
- name: user_name_embd_254
dtype: float64
- name: user_name_embd_255
dtype: float64
- name: user_name_embd_256
dtype: float64
- name: user_name_embd_257
dtype: float64
- name: user_name_embd_258
dtype: float64
- name: user_name_embd_259
dtype: float64
- name: user_name_embd_260
dtype: float64
- name: user_name_embd_261
dtype: float64
- name: user_name_embd_262
dtype: float64
- name: user_name_embd_263
dtype: float64
- name: user_name_embd_264
dtype: float64
- name: user_name_embd_265
dtype: float64
- name: user_name_embd_266
dtype: float64
- name: user_name_embd_267
dtype: float64
- name: user_name_embd_268
dtype: float64
- name: user_name_embd_269
dtype: float64
- name: user_name_embd_270
dtype: float64
- name: user_name_embd_271
dtype: float64
- name: user_name_embd_272
dtype: float64
- name: user_name_embd_273
dtype: float64
- name: user_name_embd_274
dtype: float64
- name: user_name_embd_275
dtype: float64
- name: user_name_embd_276
dtype: float64
- name: user_name_embd_277
dtype: float64
- name: user_name_embd_278
dtype: float64
- name: user_name_embd_279
dtype: float64
- name: user_name_embd_280
dtype: float64
- name: user_name_embd_281
dtype: float64
- name: user_name_embd_282
dtype: float64
- name: user_name_embd_283
dtype: float64
- name: user_name_embd_284
dtype: float64
- name: user_name_embd_285
dtype: float64
- name: user_name_embd_286
dtype: float64
- name: user_name_embd_287
dtype: float64
- name: user_name_embd_288
dtype: float64
- name: user_name_embd_289
dtype: float64
- name: user_name_embd_290
dtype: float64
- name: user_name_embd_291
dtype: float64
- name: user_name_embd_292
dtype: float64
- name: user_name_embd_293
dtype: float64
- name: user_name_embd_294
dtype: float64
- name: user_name_embd_295
dtype: float64
- name: user_name_embd_296
dtype: float64
- name: user_name_embd_297
dtype: float64
- name: user_name_embd_298
dtype: float64
- name: user_name_embd_299
dtype: float64
- name: user_name_embd_300
dtype: float64
- name: user_name_embd_301
dtype: float64
- name: user_name_embd_302
dtype: float64
- name: user_name_embd_303
dtype: float64
- name: user_name_embd_304
dtype: float64
- name: user_name_embd_305
dtype: float64
- name: user_name_embd_306
dtype: float64
- name: user_name_embd_307
dtype: float64
- name: user_name_embd_308
dtype: float64
- name: user_name_embd_309
dtype: float64
- name: user_name_embd_310
dtype: float64
- name: user_name_embd_311
dtype: float64
- name: user_name_embd_312
dtype: float64
- name: user_name_embd_313
dtype: float64
- name: user_name_embd_314
dtype: float64
- name: user_name_embd_315
dtype: float64
- name: user_name_embd_316
dtype: float64
- name: user_name_embd_317
dtype: float64
- name: user_name_embd_318
dtype: float64
- name: user_name_embd_319
dtype: float64
- name: user_name_embd_320
dtype: float64
- name: user_name_embd_321
dtype: float64
- name: user_name_embd_322
dtype: float64
- name: user_name_embd_323
dtype: float64
- name: user_name_embd_324
dtype: float64
- name: user_name_embd_325
dtype: float64
- name: user_name_embd_326
dtype: float64
- name: user_name_embd_327
dtype: float64
- name: user_name_embd_328
dtype: float64
- name: user_name_embd_329
dtype: float64
- name: user_name_embd_330
dtype: float64
- name: user_name_embd_331
dtype: float64
- name: user_name_embd_332
dtype: float64
- name: user_name_embd_333
dtype: float64
- name: user_name_embd_334
dtype: float64
- name: user_name_embd_335
dtype: float64
- name: user_name_embd_336
dtype: float64
- name: user_name_embd_337
dtype: float64
- name: user_name_embd_338
dtype: float64
- name: user_name_embd_339
dtype: float64
- name: user_name_embd_340
dtype: float64
- name: user_name_embd_341
dtype: float64
- name: user_name_embd_342
dtype: float64
- name: user_name_embd_343
dtype: float64
- name: user_name_embd_344
dtype: float64
- name: user_name_embd_345
dtype: float64
- name: user_name_embd_346
dtype: float64
- name: user_name_embd_347
dtype: float64
- name: user_name_embd_348
dtype: float64
- name: user_name_embd_349
dtype: float64
- name: user_name_embd_350
dtype: float64
- name: user_name_embd_351
dtype: float64
- name: user_name_embd_352
dtype: float64
- name: user_name_embd_353
dtype: float64
- name: user_name_embd_354
dtype: float64
- name: user_name_embd_355
dtype: float64
- name: user_name_embd_356
dtype: float64
- name: user_name_embd_357
dtype: float64
- name: user_name_embd_358
dtype: float64
- name: user_name_embd_359
dtype: float64
- name: user_name_embd_360
dtype: float64
- name: user_name_embd_361
dtype: float64
- name: user_name_embd_362
dtype: float64
- name: user_name_embd_363
dtype: float64
- name: user_name_embd_364
dtype: float64
- name: user_name_embd_365
dtype: float64
- name: user_name_embd_366
dtype: float64
- name: user_name_embd_367
dtype: float64
- name: user_name_embd_368
dtype: float64
- name: user_name_embd_369
dtype: float64
- name: user_name_embd_370
dtype: float64
- name: user_name_embd_371
dtype: float64
- name: user_name_embd_372
dtype: float64
- name: user_name_embd_373
dtype: float64
- name: user_name_embd_374
dtype: float64
- name: user_name_embd_375
dtype: float64
- name: user_name_embd_376
dtype: float64
- name: user_name_embd_377
dtype: float64
- name: user_name_embd_378
dtype: float64
- name: user_name_embd_379
dtype: float64
- name: user_name_embd_380
dtype: float64
- name: user_name_embd_381
dtype: float64
- name: user_name_embd_382
dtype: float64
- name: user_name_embd_383
dtype: float64
- name: bot_name_embd_0
dtype: float64
- name: bot_name_embd_1
dtype: float64
- name: bot_name_embd_2
dtype: float64
- name: bot_name_embd_3
dtype: float64
- name: bot_name_embd_4
dtype: float64
- name: bot_name_embd_5
dtype: float64
- name: bot_name_embd_6
dtype: float64
- name: bot_name_embd_7
dtype: float64
- name: bot_name_embd_8
dtype: float64
- name: bot_name_embd_9
dtype: float64
- name: bot_name_embd_10
dtype: float64
- name: bot_name_embd_11
dtype: float64
- name: bot_name_embd_12
dtype: float64
- name: bot_name_embd_13
dtype: float64
- name: bot_name_embd_14
dtype: float64
- name: bot_name_embd_15
dtype: float64
- name: bot_name_embd_16
dtype: float64
- name: bot_name_embd_17
dtype: float64
- name: bot_name_embd_18
dtype: float64
- name: bot_name_embd_19
dtype: float64
- name: bot_name_embd_20
dtype: float64
- name: bot_name_embd_21
dtype: float64
- name: bot_name_embd_22
dtype: float64
- name: bot_name_embd_23
dtype: float64
- name: bot_name_embd_24
dtype: float64
- name: bot_name_embd_25
dtype: float64
- name: bot_name_embd_26
dtype: float64
- name: bot_name_embd_27
dtype: float64
- name: bot_name_embd_28
dtype: float64
- name: bot_name_embd_29
dtype: float64
- name: bot_name_embd_30
dtype: float64
- name: bot_name_embd_31
dtype: float64
- name: bot_name_embd_32
dtype: float64
- name: bot_name_embd_33
dtype: float64
- name: bot_name_embd_34
dtype: float64
- name: bot_name_embd_35
dtype: float64
- name: bot_name_embd_36
dtype: float64
- name: bot_name_embd_37
dtype: float64
- name: bot_name_embd_38
dtype: float64
- name: bot_name_embd_39
dtype: float64
- name: bot_name_embd_40
dtype: float64
- name: bot_name_embd_41
dtype: float64
- name: bot_name_embd_42
dtype: float64
- name: bot_name_embd_43
dtype: float64
- name: bot_name_embd_44
dtype: float64
- name: bot_name_embd_45
dtype: float64
- name: bot_name_embd_46
dtype: float64
- name: bot_name_embd_47
dtype: float64
- name: bot_name_embd_48
dtype: float64
- name: bot_name_embd_49
dtype: float64
- name: bot_name_embd_50
dtype: float64
- name: bot_name_embd_51
dtype: float64
- name: bot_name_embd_52
dtype: float64
- name: bot_name_embd_53
dtype: float64
- name: bot_name_embd_54
dtype: float64
- name: bot_name_embd_55
dtype: float64
- name: bot_name_embd_56
dtype: float64
- name: bot_name_embd_57
dtype: float64
- name: bot_name_embd_58
dtype: float64
- name: bot_name_embd_59
dtype: float64
- name: bot_name_embd_60
dtype: float64
- name: bot_name_embd_61
dtype: float64
- name: bot_name_embd_62
dtype: float64
- name: bot_name_embd_63
dtype: float64
- name: bot_name_embd_64
dtype: float64
- name: bot_name_embd_65
dtype: float64
- name: bot_name_embd_66
dtype: float64
- name: bot_name_embd_67
dtype: float64
- name: bot_name_embd_68
dtype: float64
- name: bot_name_embd_69
dtype: float64
- name: bot_name_embd_70
dtype: float64
- name: bot_name_embd_71
dtype: float64
- name: bot_name_embd_72
dtype: float64
- name: bot_name_embd_73
dtype: float64
- name: bot_name_embd_74
dtype: float64
- name: bot_name_embd_75
dtype: float64
- name: bot_name_embd_76
dtype: float64
- name: bot_name_embd_77
dtype: float64
- name: bot_name_embd_78
dtype: float64
- name: bot_name_embd_79
dtype: float64
- name: bot_name_embd_80
dtype: float64
- name: bot_name_embd_81
dtype: float64
- name: bot_name_embd_82
dtype: float64
- name: bot_name_embd_83
dtype: float64
- name: bot_name_embd_84
dtype: float64
- name: bot_name_embd_85
dtype: float64
- name: bot_name_embd_86
dtype: float64
- name: bot_name_embd_87
dtype: float64
- name: bot_name_embd_88
dtype: float64
- name: bot_name_embd_89
dtype: float64
- name: bot_name_embd_90
dtype: float64
- name: bot_name_embd_91
dtype: float64
- name: bot_name_embd_92
dtype: float64
- name: bot_name_embd_93
dtype: float64
- name: bot_name_embd_94
dtype: float64
- name: bot_name_embd_95
dtype: float64
- name: bot_name_embd_96
dtype: float64
- name: bot_name_embd_97
dtype: float64
- name: bot_name_embd_98
dtype: float64
- name: bot_name_embd_99
dtype: float64
- name: bot_name_embd_100
dtype: float64
- name: bot_name_embd_101
dtype: float64
- name: bot_name_embd_102
dtype: float64
- name: bot_name_embd_103
dtype: float64
- name: bot_name_embd_104
dtype: float64
- name: bot_name_embd_105
dtype: float64
- name: bot_name_embd_106
dtype: float64
- name: bot_name_embd_107
dtype: float64
- name: bot_name_embd_108
dtype: float64
- name: bot_name_embd_109
dtype: float64
- name: bot_name_embd_110
dtype: float64
- name: bot_name_embd_111
dtype: float64
- name: bot_name_embd_112
dtype: float64
- name: bot_name_embd_113
dtype: float64
- name: bot_name_embd_114
dtype: float64
- name: bot_name_embd_115
dtype: float64
- name: bot_name_embd_116
dtype: float64
- name: bot_name_embd_117
dtype: float64
- name: bot_name_embd_118
dtype: float64
- name: bot_name_embd_119
dtype: float64
- name: bot_name_embd_120
dtype: float64
- name: bot_name_embd_121
dtype: float64
- name: bot_name_embd_122
dtype: float64
- name: bot_name_embd_123
dtype: float64
- name: bot_name_embd_124
dtype: float64
- name: bot_name_embd_125
dtype: float64
- name: bot_name_embd_126
dtype: float64
- name: bot_name_embd_127
dtype: float64
- name: bot_name_embd_128
dtype: float64
- name: bot_name_embd_129
dtype: float64
- name: bot_name_embd_130
dtype: float64
- name: bot_name_embd_131
dtype: float64
- name: bot_name_embd_132
dtype: float64
- name: bot_name_embd_133
dtype: float64
- name: bot_name_embd_134
dtype: float64
- name: bot_name_embd_135
dtype: float64
- name: bot_name_embd_136
dtype: float64
- name: bot_name_embd_137
dtype: float64
- name: bot_name_embd_138
dtype: float64
- name: bot_name_embd_139
dtype: float64
- name: bot_name_embd_140
dtype: float64
- name: bot_name_embd_141
dtype: float64
- name: bot_name_embd_142
dtype: float64
- name: bot_name_embd_143
dtype: float64
- name: bot_name_embd_144
dtype: float64
- name: bot_name_embd_145
dtype: float64
- name: bot_name_embd_146
dtype: float64
- name: bot_name_embd_147
dtype: float64
- name: bot_name_embd_148
dtype: float64
- name: bot_name_embd_149
dtype: float64
- name: bot_name_embd_150
dtype: float64
- name: bot_name_embd_151
dtype: float64
- name: bot_name_embd_152
dtype: float64
- name: bot_name_embd_153
dtype: float64
- name: bot_name_embd_154
dtype: float64
- name: bot_name_embd_155
dtype: float64
- name: bot_name_embd_156
dtype: float64
- name: bot_name_embd_157
dtype: float64
- name: bot_name_embd_158
dtype: float64
- name: bot_name_embd_159
dtype: float64
- name: bot_name_embd_160
dtype: float64
- name: bot_name_embd_161
dtype: float64
- name: bot_name_embd_162
dtype: float64
- name: bot_name_embd_163
dtype: float64
- name: bot_name_embd_164
dtype: float64
- name: bot_name_embd_165
dtype: float64
- name: bot_name_embd_166
dtype: float64
- name: bot_name_embd_167
dtype: float64
- name: bot_name_embd_168
dtype: float64
- name: bot_name_embd_169
dtype: float64
- name: bot_name_embd_170
dtype: float64
- name: bot_name_embd_171
dtype: float64
- name: bot_name_embd_172
dtype: float64
- name: bot_name_embd_173
dtype: float64
- name: bot_name_embd_174
dtype: float64
- name: bot_name_embd_175
dtype: float64
- name: bot_name_embd_176
dtype: float64
- name: bot_name_embd_177
dtype: float64
- name: bot_name_embd_178
dtype: float64
- name: bot_name_embd_179
dtype: float64
- name: bot_name_embd_180
dtype: float64
- name: bot_name_embd_181
dtype: float64
- name: bot_name_embd_182
dtype: float64
- name: bot_name_embd_183
dtype: float64
- name: bot_name_embd_184
dtype: float64
- name: bot_name_embd_185
dtype: float64
- name: bot_name_embd_186
dtype: float64
- name: bot_name_embd_187
dtype: float64
- name: bot_name_embd_188
dtype: float64
- name: bot_name_embd_189
dtype: float64
- name: bot_name_embd_190
dtype: float64
- name: bot_name_embd_191
dtype: float64
- name: bot_name_embd_192
dtype: float64
- name: bot_name_embd_193
dtype: float64
- name: bot_name_embd_194
dtype: float64
- name: bot_name_embd_195
dtype: float64
- name: bot_name_embd_196
dtype: float64
- name: bot_name_embd_197
dtype: float64
- name: bot_name_embd_198
dtype: float64
- name: bot_name_embd_199
dtype: float64
- name: bot_name_embd_200
dtype: float64
- name: bot_name_embd_201
dtype: float64
- name: bot_name_embd_202
dtype: float64
- name: bot_name_embd_203
dtype: float64
- name: bot_name_embd_204
dtype: float64
- name: bot_name_embd_205
dtype: float64
- name: bot_name_embd_206
dtype: float64
- name: bot_name_embd_207
dtype: float64
- name: bot_name_embd_208
dtype: float64
- name: bot_name_embd_209
dtype: float64
- name: bot_name_embd_210
dtype: float64
- name: bot_name_embd_211
dtype: float64
- name: bot_name_embd_212
dtype: float64
- name: bot_name_embd_213
dtype: float64
- name: bot_name_embd_214
dtype: float64
- name: bot_name_embd_215
dtype: float64
- name: bot_name_embd_216
dtype: float64
- name: bot_name_embd_217
dtype: float64
- name: bot_name_embd_218
dtype: float64
- name: bot_name_embd_219
dtype: float64
- name: bot_name_embd_220
dtype: float64
- name: bot_name_embd_221
dtype: float64
- name: bot_name_embd_222
dtype: float64
- name: bot_name_embd_223
dtype: float64
- name: bot_name_embd_224
dtype: float64
- name: bot_name_embd_225
dtype: float64
- name: bot_name_embd_226
dtype: float64
- name: bot_name_embd_227
dtype: float64
- name: bot_name_embd_228
dtype: float64
- name: bot_name_embd_229
dtype: float64
- name: bot_name_embd_230
dtype: float64
- name: bot_name_embd_231
dtype: float64
- name: bot_name_embd_232
dtype: float64
- name: bot_name_embd_233
dtype: float64
- name: bot_name_embd_234
dtype: float64
- name: bot_name_embd_235
dtype: float64
- name: bot_name_embd_236
dtype: float64
- name: bot_name_embd_237
dtype: float64
- name: bot_name_embd_238
dtype: float64
- name: bot_name_embd_239
dtype: float64
- name: bot_name_embd_240
dtype: float64
- name: bot_name_embd_241
dtype: float64
- name: bot_name_embd_242
dtype: float64
- name: bot_name_embd_243
dtype: float64
- name: bot_name_embd_244
dtype: float64
- name: bot_name_embd_245
dtype: float64
- name: bot_name_embd_246
dtype: float64
- name: bot_name_embd_247
dtype: float64
- name: bot_name_embd_248
dtype: float64
- name: bot_name_embd_249
dtype: float64
- name: bot_name_embd_250
dtype: float64
- name: bot_name_embd_251
dtype: float64
- name: bot_name_embd_252
dtype: float64
- name: bot_name_embd_253
dtype: float64
- name: bot_name_embd_254
dtype: float64
- name: bot_name_embd_255
dtype: float64
- name: bot_name_embd_256
dtype: float64
- name: bot_name_embd_257
dtype: float64
- name: bot_name_embd_258
dtype: float64
- name: bot_name_embd_259
dtype: float64
- name: bot_name_embd_260
dtype: float64
- name: bot_name_embd_261
dtype: float64
- name: bot_name_embd_262
dtype: float64
- name: bot_name_embd_263
dtype: float64
- name: bot_name_embd_264
dtype: float64
- name: bot_name_embd_265
dtype: float64
- name: bot_name_embd_266
dtype: float64
- name: bot_name_embd_267
dtype: float64
- name: bot_name_embd_268
dtype: float64
- name: bot_name_embd_269
dtype: float64
- name: bot_name_embd_270
dtype: float64
- name: bot_name_embd_271
dtype: float64
- name: bot_name_embd_272
dtype: float64
- name: bot_name_embd_273
dtype: float64
- name: bot_name_embd_274
dtype: float64
- name: bot_name_embd_275
dtype: float64
- name: bot_name_embd_276
dtype: float64
- name: bot_name_embd_277
dtype: float64
- name: bot_name_embd_278
dtype: float64
- name: bot_name_embd_279
dtype: float64
- name: bot_name_embd_280
dtype: float64
- name: bot_name_embd_281
dtype: float64
- name: bot_name_embd_282
dtype: float64
- name: bot_name_embd_283
dtype: float64
- name: bot_name_embd_284
dtype: float64
- name: bot_name_embd_285
dtype: float64
- name: bot_name_embd_286
dtype: float64
- name: bot_name_embd_287
dtype: float64
- name: bot_name_embd_288
dtype: float64
- name: bot_name_embd_289
dtype: float64
- name: bot_name_embd_290
dtype: float64
- name: bot_name_embd_291
dtype: float64
- name: bot_name_embd_292
dtype: float64
- name: bot_name_embd_293
dtype: float64
- name: bot_name_embd_294
dtype: float64
- name: bot_name_embd_295
dtype: float64
- name: bot_name_embd_296
dtype: float64
- name: bot_name_embd_297
dtype: float64
- name: bot_name_embd_298
dtype: float64
- name: bot_name_embd_299
dtype: float64
- name: bot_name_embd_300
dtype: float64
- name: bot_name_embd_301
dtype: float64
- name: bot_name_embd_302
dtype: float64
- name: bot_name_embd_303
dtype: float64
- name: bot_name_embd_304
dtype: float64
- name: bot_name_embd_305
dtype: float64
- name: bot_name_embd_306
dtype: float64
- name: bot_name_embd_307
dtype: float64
- name: bot_name_embd_308
dtype: float64
- name: bot_name_embd_309
dtype: float64
- name: bot_name_embd_310
dtype: float64
- name: bot_name_embd_311
dtype: float64
- name: bot_name_embd_312
dtype: float64
- name: bot_name_embd_313
dtype: float64
- name: bot_name_embd_314
dtype: float64
- name: bot_name_embd_315
dtype: float64
- name: bot_name_embd_316
dtype: float64
- name: bot_name_embd_317
dtype: float64
- name: bot_name_embd_318
dtype: float64
- name: bot_name_embd_319
dtype: float64
- name: bot_name_embd_320
dtype: float64
- name: bot_name_embd_321
dtype: float64
- name: bot_name_embd_322
dtype: float64
- name: bot_name_embd_323
dtype: float64
- name: bot_name_embd_324
dtype: float64
- name: bot_name_embd_325
dtype: float64
- name: bot_name_embd_326
dtype: float64
- name: bot_name_embd_327
dtype: float64
- name: bot_name_embd_328
dtype: float64
- name: bot_name_embd_329
dtype: float64
- name: bot_name_embd_330
dtype: float64
- name: bot_name_embd_331
dtype: float64
- name: bot_name_embd_332
dtype: float64
- name: bot_name_embd_333
dtype: float64
- name: bot_name_embd_334
dtype: float64
- name: bot_name_embd_335
dtype: float64
- name: bot_name_embd_336
dtype: float64
- name: bot_name_embd_337
dtype: float64
- name: bot_name_embd_338
dtype: float64
- name: bot_name_embd_339
dtype: float64
- name: bot_name_embd_340
dtype: float64
- name: bot_name_embd_341
dtype: float64
- name: bot_name_embd_342
dtype: float64
- name: bot_name_embd_343
dtype: float64
- name: bot_name_embd_344
dtype: float64
- name: bot_name_embd_345
dtype: float64
- name: bot_name_embd_346
dtype: float64
- name: bot_name_embd_347
dtype: float64
- name: bot_name_embd_348
dtype: float64
- name: bot_name_embd_349
dtype: float64
- name: bot_name_embd_350
dtype: float64
- name: bot_name_embd_351
dtype: float64
- name: bot_name_embd_352
dtype: float64
- name: bot_name_embd_353
dtype: float64
- name: bot_name_embd_354
dtype: float64
- name: bot_name_embd_355
dtype: float64
- name: bot_name_embd_356
dtype: float64
- name: bot_name_embd_357
dtype: float64
- name: bot_name_embd_358
dtype: float64
- name: bot_name_embd_359
dtype: float64
- name: bot_name_embd_360
dtype: float64
- name: bot_name_embd_361
dtype: float64
- name: bot_name_embd_362
dtype: float64
- name: bot_name_embd_363
dtype: float64
- name: bot_name_embd_364
dtype: float64
- name: bot_name_embd_365
dtype: float64
- name: bot_name_embd_366
dtype: float64
- name: bot_name_embd_367
dtype: float64
- name: bot_name_embd_368
dtype: float64
- name: bot_name_embd_369
dtype: float64
- name: bot_name_embd_370
dtype: float64
- name: bot_name_embd_371
dtype: float64
- name: bot_name_embd_372
dtype: float64
- name: bot_name_embd_373
dtype: float64
- name: bot_name_embd_374
dtype: float64
- name: bot_name_embd_375
dtype: float64
- name: bot_name_embd_376
dtype: float64
- name: bot_name_embd_377
dtype: float64
- name: bot_name_embd_378
dtype: float64
- name: bot_name_embd_379
dtype: float64
- name: bot_name_embd_380
dtype: float64
- name: bot_name_embd_381
dtype: float64
- name: bot_name_embd_382
dtype: float64
- name: bot_name_embd_383
dtype: float64
- name: bot_persona_embd_0
dtype: float64
- name: bot_persona_embd_1
dtype: float64
- name: bot_persona_embd_2
dtype: float64
- name: bot_persona_embd_3
dtype: float64
- name: bot_persona_embd_4
dtype: float64
- name: bot_persona_embd_5
dtype: float64
- name: bot_persona_embd_6
dtype: float64
- name: bot_persona_embd_7
dtype: float64
- name: bot_persona_embd_8
dtype: float64
- name: bot_persona_embd_9
dtype: float64
- name: bot_persona_embd_10
dtype: float64
- name: bot_persona_embd_11
dtype: float64
- name: bot_persona_embd_12
dtype: float64
- name: bot_persona_embd_13
dtype: float64
- name: bot_persona_embd_14
dtype: float64
- name: bot_persona_embd_15
dtype: float64
- name: bot_persona_embd_16
dtype: float64
- name: bot_persona_embd_17
dtype: float64
- name: bot_persona_embd_18
dtype: float64
- name: bot_persona_embd_19
dtype: float64
- name: bot_persona_embd_20
dtype: float64
- name: bot_persona_embd_21
dtype: float64
- name: bot_persona_embd_22
dtype: float64
- name: bot_persona_embd_23
dtype: float64
- name: bot_persona_embd_24
dtype: float64
- name: bot_persona_embd_25
dtype: float64
- name: bot_persona_embd_26
dtype: float64
- name: bot_persona_embd_27
dtype: float64
- name: bot_persona_embd_28
dtype: float64
- name: bot_persona_embd_29
dtype: float64
- name: bot_persona_embd_30
dtype: float64
- name: bot_persona_embd_31
dtype: float64
- name: bot_persona_embd_32
dtype: float64
- name: bot_persona_embd_33
dtype: float64
- name: bot_persona_embd_34
dtype: float64
- name: bot_persona_embd_35
dtype: float64
- name: bot_persona_embd_36
dtype: float64
- name: bot_persona_embd_37
dtype: float64
- name: bot_persona_embd_38
dtype: float64
- name: bot_persona_embd_39
dtype: float64
- name: bot_persona_embd_40
dtype: float64
- name: bot_persona_embd_41
dtype: float64
- name: bot_persona_embd_42
dtype: float64
- name: bot_persona_embd_43
dtype: float64
- name: bot_persona_embd_44
dtype: float64
- name: bot_persona_embd_45
dtype: float64
- name: bot_persona_embd_46
dtype: float64
- name: bot_persona_embd_47
dtype: float64
- name: bot_persona_embd_48
dtype: float64
- name: bot_persona_embd_49
dtype: float64
- name: bot_persona_embd_50
dtype: float64
- name: bot_persona_embd_51
dtype: float64
- name: bot_persona_embd_52
dtype: float64
- name: bot_persona_embd_53
dtype: float64
- name: bot_persona_embd_54
dtype: float64
- name: bot_persona_embd_55
dtype: float64
- name: bot_persona_embd_56
dtype: float64
- name: bot_persona_embd_57
dtype: float64
- name: bot_persona_embd_58
dtype: float64
- name: bot_persona_embd_59
dtype: float64
- name: bot_persona_embd_60
dtype: float64
- name: bot_persona_embd_61
dtype: float64
- name: bot_persona_embd_62
dtype: float64
- name: bot_persona_embd_63
dtype: float64
- name: bot_persona_embd_64
dtype: float64
- name: bot_persona_embd_65
dtype: float64
- name: bot_persona_embd_66
dtype: float64
- name: bot_persona_embd_67
dtype: float64
- name: bot_persona_embd_68
dtype: float64
- name: bot_persona_embd_69
dtype: float64
- name: bot_persona_embd_70
dtype: float64
- name: bot_persona_embd_71
dtype: float64
- name: bot_persona_embd_72
dtype: float64
- name: bot_persona_embd_73
dtype: float64
- name: bot_persona_embd_74
dtype: float64
- name: bot_persona_embd_75
dtype: float64
- name: bot_persona_embd_76
dtype: float64
- name: bot_persona_embd_77
dtype: float64
- name: bot_persona_embd_78
dtype: float64
- name: bot_persona_embd_79
dtype: float64
- name: bot_persona_embd_80
dtype: float64
- name: bot_persona_embd_81
dtype: float64
- name: bot_persona_embd_82
dtype: float64
- name: bot_persona_embd_83
dtype: float64
- name: bot_persona_embd_84
dtype: float64
- name: bot_persona_embd_85
dtype: float64
- name: bot_persona_embd_86
dtype: float64
- name: bot_persona_embd_87
dtype: float64
- name: bot_persona_embd_88
dtype: float64
- name: bot_persona_embd_89
dtype: float64
- name: bot_persona_embd_90
dtype: float64
- name: bot_persona_embd_91
dtype: float64
- name: bot_persona_embd_92
dtype: float64
- name: bot_persona_embd_93
dtype: float64
- name: bot_persona_embd_94
dtype: float64
- name: bot_persona_embd_95
dtype: float64
- name: bot_persona_embd_96
dtype: float64
- name: bot_persona_embd_97
dtype: float64
- name: bot_persona_embd_98
dtype: float64
- name: bot_persona_embd_99
dtype: float64
- name: bot_persona_embd_100
dtype: float64
- name: bot_persona_embd_101
dtype: float64
- name: bot_persona_embd_102
dtype: float64
- name: bot_persona_embd_103
dtype: float64
- name: bot_persona_embd_104
dtype: float64
- name: bot_persona_embd_105
dtype: float64
- name: bot_persona_embd_106
dtype: float64
- name: bot_persona_embd_107
dtype: float64
- name: bot_persona_embd_108
dtype: float64
- name: bot_persona_embd_109
dtype: float64
- name: bot_persona_embd_110
dtype: float64
- name: bot_persona_embd_111
dtype: float64
- name: bot_persona_embd_112
dtype: float64
- name: bot_persona_embd_113
dtype: float64
- name: bot_persona_embd_114
dtype: float64
- name: bot_persona_embd_115
dtype: float64
- name: bot_persona_embd_116
dtype: float64
- name: bot_persona_embd_117
dtype: float64
- name: bot_persona_embd_118
dtype: float64
- name: bot_persona_embd_119
dtype: float64
- name: bot_persona_embd_120
dtype: float64
- name: bot_persona_embd_121
dtype: float64
- name: bot_persona_embd_122
dtype: float64
- name: bot_persona_embd_123
dtype: float64
- name: bot_persona_embd_124
dtype: float64
- name: bot_persona_embd_125
dtype: float64
- name: bot_persona_embd_126
dtype: float64
- name: bot_persona_embd_127
dtype: float64
- name: bot_persona_embd_128
dtype: float64
- name: bot_persona_embd_129
dtype: float64
- name: bot_persona_embd_130
dtype: float64
- name: bot_persona_embd_131
dtype: float64
- name: bot_persona_embd_132
dtype: float64
- name: bot_persona_embd_133
dtype: float64
- name: bot_persona_embd_134
dtype: float64
- name: bot_persona_embd_135
dtype: float64
- name: bot_persona_embd_136
dtype: float64
- name: bot_persona_embd_137
dtype: float64
- name: bot_persona_embd_138
dtype: float64
- name: bot_persona_embd_139
dtype: float64
- name: bot_persona_embd_140
dtype: float64
- name: bot_persona_embd_141
dtype: float64
- name: bot_persona_embd_142
dtype: float64
- name: bot_persona_embd_143
dtype: float64
- name: bot_persona_embd_144
dtype: float64
- name: bot_persona_embd_145
dtype: float64
- name: bot_persona_embd_146
dtype: float64
- name: bot_persona_embd_147
dtype: float64
- name: bot_persona_embd_148
dtype: float64
- name: bot_persona_embd_149
dtype: float64
- name: bot_persona_embd_150
dtype: float64
- name: bot_persona_embd_151
dtype: float64
- name: bot_persona_embd_152
dtype: float64
- name: bot_persona_embd_153
dtype: float64
- name: bot_persona_embd_154
dtype: float64
- name: bot_persona_embd_155
dtype: float64
- name: bot_persona_embd_156
dtype: float64
- name: bot_persona_embd_157
dtype: float64
- name: bot_persona_embd_158
dtype: float64
- name: bot_persona_embd_159
dtype: float64
- name: bot_persona_embd_160
dtype: float64
- name: bot_persona_embd_161
dtype: float64
- name: bot_persona_embd_162
dtype: float64
- name: bot_persona_embd_163
dtype: float64
- name: bot_persona_embd_164
dtype: float64
- name: bot_persona_embd_165
dtype: float64
- name: bot_persona_embd_166
dtype: float64
- name: bot_persona_embd_167
dtype: float64
- name: bot_persona_embd_168
dtype: float64
- name: bot_persona_embd_169
dtype: float64
- name: bot_persona_embd_170
dtype: float64
- name: bot_persona_embd_171
dtype: float64
- name: bot_persona_embd_172
dtype: float64
- name: bot_persona_embd_173
dtype: float64
- name: bot_persona_embd_174
dtype: float64
- name: bot_persona_embd_175
dtype: float64
- name: bot_persona_embd_176
dtype: float64
- name: bot_persona_embd_177
dtype: float64
- name: bot_persona_embd_178
dtype: float64
- name: bot_persona_embd_179
dtype: float64
- name: bot_persona_embd_180
dtype: float64
- name: bot_persona_embd_181
dtype: float64
- name: bot_persona_embd_182
dtype: float64
- name: bot_persona_embd_183
dtype: float64
- name: bot_persona_embd_184
dtype: float64
- name: bot_persona_embd_185
dtype: float64
- name: bot_persona_embd_186
dtype: float64
- name: bot_persona_embd_187
dtype: float64
- name: bot_persona_embd_188
dtype: float64
- name: bot_persona_embd_189
dtype: float64
- name: bot_persona_embd_190
dtype: float64
- name: bot_persona_embd_191
dtype: float64
- name: bot_persona_embd_192
dtype: float64
- name: bot_persona_embd_193
dtype: float64
- name: bot_persona_embd_194
dtype: float64
- name: bot_persona_embd_195
dtype: float64
- name: bot_persona_embd_196
dtype: float64
- name: bot_persona_embd_197
dtype: float64
- name: bot_persona_embd_198
dtype: float64
- name: bot_persona_embd_199
dtype: float64
- name: bot_persona_embd_200
dtype: float64
- name: bot_persona_embd_201
dtype: float64
- name: bot_persona_embd_202
dtype: float64
- name: bot_persona_embd_203
dtype: float64
- name: bot_persona_embd_204
dtype: float64
- name: bot_persona_embd_205
dtype: float64
- name: bot_persona_embd_206
dtype: float64
- name: bot_persona_embd_207
dtype: float64
- name: bot_persona_embd_208
dtype: float64
- name: bot_persona_embd_209
dtype: float64
- name: bot_persona_embd_210
dtype: float64
- name: bot_persona_embd_211
dtype: float64
- name: bot_persona_embd_212
dtype: float64
- name: bot_persona_embd_213
dtype: float64
- name: bot_persona_embd_214
dtype: float64
- name: bot_persona_embd_215
dtype: float64
- name: bot_persona_embd_216
dtype: float64
- name: bot_persona_embd_217
dtype: float64
- name: bot_persona_embd_218
dtype: float64
- name: bot_persona_embd_219
dtype: float64
- name: bot_persona_embd_220
dtype: float64
- name: bot_persona_embd_221
dtype: float64
- name: bot_persona_embd_222
dtype: float64
- name: bot_persona_embd_223
dtype: float64
- name: bot_persona_embd_224
dtype: float64
- name: bot_persona_embd_225
dtype: float64
- name: bot_persona_embd_226
dtype: float64
- name: bot_persona_embd_227
dtype: float64
- name: bot_persona_embd_228
dtype: float64
- name: bot_persona_embd_229
dtype: float64
- name: bot_persona_embd_230
dtype: float64
- name: bot_persona_embd_231
dtype: float64
- name: bot_persona_embd_232
dtype: float64
- name: bot_persona_embd_233
dtype: float64
- name: bot_persona_embd_234
dtype: float64
- name: bot_persona_embd_235
dtype: float64
- name: bot_persona_embd_236
dtype: float64
- name: bot_persona_embd_237
dtype: float64
- name: bot_persona_embd_238
dtype: float64
- name: bot_persona_embd_239
dtype: float64
- name: bot_persona_embd_240
dtype: float64
- name: bot_persona_embd_241
dtype: float64
- name: bot_persona_embd_242
dtype: float64
- name: bot_persona_embd_243
dtype: float64
- name: bot_persona_embd_244
dtype: float64
- name: bot_persona_embd_245
dtype: float64
- name: bot_persona_embd_246
dtype: float64
- name: bot_persona_embd_247
dtype: float64
- name: bot_persona_embd_248
dtype: float64
- name: bot_persona_embd_249
dtype: float64
- name: bot_persona_embd_250
dtype: float64
- name: bot_persona_embd_251
dtype: float64
- name: bot_persona_embd_252
dtype: float64
- name: bot_persona_embd_253
dtype: float64
- name: bot_persona_embd_254
dtype: float64
- name: bot_persona_embd_255
dtype: float64
- name: bot_persona_embd_256
dtype: float64
- name: bot_persona_embd_257
dtype: float64
- name: bot_persona_embd_258
dtype: float64
- name: bot_persona_embd_259
dtype: float64
- name: bot_persona_embd_260
dtype: float64
- name: bot_persona_embd_261
dtype: float64
- name: bot_persona_embd_262
dtype: float64
- name: bot_persona_embd_263
dtype: float64
- name: bot_persona_embd_264
dtype: float64
- name: bot_persona_embd_265
dtype: float64
- name: bot_persona_embd_266
dtype: float64
- name: bot_persona_embd_267
dtype: float64
- name: bot_persona_embd_268
dtype: float64
- name: bot_persona_embd_269
dtype: float64
- name: bot_persona_embd_270
dtype: float64
- name: bot_persona_embd_271
dtype: float64
- name: bot_persona_embd_272
dtype: float64
- name: bot_persona_embd_273
dtype: float64
- name: bot_persona_embd_274
dtype: float64
- name: bot_persona_embd_275
dtype: float64
- name: bot_persona_embd_276
dtype: float64
- name: bot_persona_embd_277
dtype: float64
- name: bot_persona_embd_278
dtype: float64
- name: bot_persona_embd_279
dtype: float64
- name: bot_persona_embd_280
dtype: float64
- name: bot_persona_embd_281
dtype: float64
- name: bot_persona_embd_282
dtype: float64
- name: bot_persona_embd_283
dtype: float64
- name: bot_persona_embd_284
dtype: float64
- name: bot_persona_embd_285
dtype: float64
- name: bot_persona_embd_286
dtype: float64
- name: bot_persona_embd_287
dtype: float64
- name: bot_persona_embd_288
dtype: float64
- name: bot_persona_embd_289
dtype: float64
- name: bot_persona_embd_290
dtype: float64
- name: bot_persona_embd_291
dtype: float64
- name: bot_persona_embd_292
dtype: float64
- name: bot_persona_embd_293
dtype: float64
- name: bot_persona_embd_294
dtype: float64
- name: bot_persona_embd_295
dtype: float64
- name: bot_persona_embd_296
dtype: float64
- name: bot_persona_embd_297
dtype: float64
- name: bot_persona_embd_298
dtype: float64
- name: bot_persona_embd_299
dtype: float64
- name: bot_persona_embd_300
dtype: float64
- name: bot_persona_embd_301
dtype: float64
- name: bot_persona_embd_302
dtype: float64
- name: bot_persona_embd_303
dtype: float64
- name: bot_persona_embd_304
dtype: float64
- name: bot_persona_embd_305
dtype: float64
- name: bot_persona_embd_306
dtype: float64
- name: bot_persona_embd_307
dtype: float64
- name: bot_persona_embd_308
dtype: float64
- name: bot_persona_embd_309
dtype: float64
- name: bot_persona_embd_310
dtype: float64
- name: bot_persona_embd_311
dtype: float64
- name: bot_persona_embd_312
dtype: float64
- name: bot_persona_embd_313
dtype: float64
- name: bot_persona_embd_314
dtype: float64
- name: bot_persona_embd_315
dtype: float64
- name: bot_persona_embd_316
dtype: float64
- name: bot_persona_embd_317
dtype: float64
- name: bot_persona_embd_318
dtype: float64
- name: bot_persona_embd_319
dtype: float64
- name: bot_persona_embd_320
dtype: float64
- name: bot_persona_embd_321
dtype: float64
- name: bot_persona_embd_322
dtype: float64
- name: bot_persona_embd_323
dtype: float64
- name: bot_persona_embd_324
dtype: float64
- name: bot_persona_embd_325
dtype: float64
- name: bot_persona_embd_326
dtype: float64
- name: bot_persona_embd_327
dtype: float64
- name: bot_persona_embd_328
dtype: float64
- name: bot_persona_embd_329
dtype: float64
- name: bot_persona_embd_330
dtype: float64
- name: bot_persona_embd_331
dtype: float64
- name: bot_persona_embd_332
dtype: float64
- name: bot_persona_embd_333
dtype: float64
- name: bot_persona_embd_334
dtype: float64
- name: bot_persona_embd_335
dtype: float64
- name: bot_persona_embd_336
dtype: float64
- name: bot_persona_embd_337
dtype: float64
- name: bot_persona_embd_338
dtype: float64
- name: bot_persona_embd_339
dtype: float64
- name: bot_persona_embd_340
dtype: float64
- name: bot_persona_embd_341
dtype: float64
- name: bot_persona_embd_342
dtype: float64
- name: bot_persona_embd_343
dtype: float64
- name: bot_persona_embd_344
dtype: float64
- name: bot_persona_embd_345
dtype: float64
- name: bot_persona_embd_346
dtype: float64
- name: bot_persona_embd_347
dtype: float64
- name: bot_persona_embd_348
dtype: float64
- name: bot_persona_embd_349
dtype: float64
- name: bot_persona_embd_350
dtype: float64
- name: bot_persona_embd_351
dtype: float64
- name: bot_persona_embd_352
dtype: float64
- name: bot_persona_embd_353
dtype: float64
- name: bot_persona_embd_354
dtype: float64
- name: bot_persona_embd_355
dtype: float64
- name: bot_persona_embd_356
dtype: float64
- name: bot_persona_embd_357
dtype: float64
- name: bot_persona_embd_358
dtype: float64
- name: bot_persona_embd_359
dtype: float64
- name: bot_persona_embd_360
dtype: float64
- name: bot_persona_embd_361
dtype: float64
- name: bot_persona_embd_362
dtype: float64
- name: bot_persona_embd_363
dtype: float64
- name: bot_persona_embd_364
dtype: float64
- name: bot_persona_embd_365
dtype: float64
- name: bot_persona_embd_366
dtype: float64
- name: bot_persona_embd_367
dtype: float64
- name: bot_persona_embd_368
dtype: float64
- name: bot_persona_embd_369
dtype: float64
- name: bot_persona_embd_370
dtype: float64
- name: bot_persona_embd_371
dtype: float64
- name: bot_persona_embd_372
dtype: float64
- name: bot_persona_embd_373
dtype: float64
- name: bot_persona_embd_374
dtype: float64
- name: bot_persona_embd_375
dtype: float64
- name: bot_persona_embd_376
dtype: float64
- name: bot_persona_embd_377
dtype: float64
- name: bot_persona_embd_378
dtype: float64
- name: bot_persona_embd_379
dtype: float64
- name: bot_persona_embd_380
dtype: float64
- name: bot_persona_embd_381
dtype: float64
- name: bot_persona_embd_382
dtype: float64
- name: bot_persona_embd_383
dtype: float64
- name: content_type
dtype: int64
- name: user_gender
dtype: int64
- name: user_age
dtype: float64
- name: bot_gender
dtype: int64
- name: year.1
dtype: int64
- name: month.1
dtype: int64
- name: day.1
dtype: int64
- name: hour.1
dtype: int64
- name: hour_sin.1
dtype: float64
- name: hour_cos.1
dtype: float64
- name: weekday.1
dtype: int64
- name: weekday_sin.1
dtype: float64
- name: weekday_cos.1
dtype: float64
- name: minute.1
dtype: int64
- name: second.1
dtype: int64
- name: is_weekend.1
dtype: int64
- name: quarter.1
dtype: int64
- name: user_name_embd_0.1
dtype: float64
- name: user_name_embd_1.1
dtype: float64
- name: user_name_embd_2.1
dtype: float64
- name: user_name_embd_3.1
dtype: float64
- name: user_name_embd_4.1
dtype: float64
- name: user_name_embd_5.1
dtype: float64
- name: user_name_embd_6.1
dtype: float64
- name: user_name_embd_7.1
dtype: float64
- name: user_name_embd_8.1
dtype: float64
- name: user_name_embd_9.1
dtype: float64
- name: user_name_embd_10.1
dtype: float64
- name: user_name_embd_11.1
dtype: float64
- name: user_name_embd_12.1
dtype: float64
- name: user_name_embd_13.1
dtype: float64
- name: user_name_embd_14.1
dtype: float64
- name: user_name_embd_15.1
dtype: float64
- name: user_name_embd_16.1
dtype: float64
- name: user_name_embd_17.1
dtype: float64
- name: user_name_embd_18.1
dtype: float64
- name: user_name_embd_19.1
dtype: float64
- name: user_name_embd_20.1
dtype: float64
- name: user_name_embd_21.1
dtype: float64
- name: user_name_embd_22.1
dtype: float64
- name: user_name_embd_23.1
dtype: float64
- name: user_name_embd_24.1
dtype: float64
- name: user_name_embd_25.1
dtype: float64
- name: user_name_embd_26.1
dtype: float64
- name: user_name_embd_27.1
dtype: float64
- name: user_name_embd_28.1
dtype: float64
- name: user_name_embd_29.1
dtype: float64
- name: user_name_embd_30.1
dtype: float64
- name: user_name_embd_31.1
dtype: float64
- name: user_name_embd_32.1
dtype: float64
- name: user_name_embd_33.1
dtype: float64
- name: user_name_embd_34.1
dtype: float64
- name: user_name_embd_35.1
dtype: float64
- name: user_name_embd_36.1
dtype: float64
- name: user_name_embd_37.1
dtype: float64
- name: user_name_embd_38.1
dtype: float64
- name: user_name_embd_39.1
dtype: float64
- name: user_name_embd_40.1
dtype: float64
- name: user_name_embd_41.1
dtype: float64
- name: user_name_embd_42.1
dtype: float64
- name: user_name_embd_43.1
dtype: float64
- name: user_name_embd_44.1
dtype: float64
- name: user_name_embd_45.1
dtype: float64
- name: user_name_embd_46.1
dtype: float64
- name: user_name_embd_47.1
dtype: float64
- name: user_name_embd_48.1
dtype: float64
- name: user_name_embd_49.1
dtype: float64
- name: user_name_embd_50.1
dtype: float64
- name: user_name_embd_51.1
dtype: float64
- name: user_name_embd_52.1
dtype: float64
- name: user_name_embd_53.1
dtype: float64
- name: user_name_embd_54.1
dtype: float64
- name: user_name_embd_55.1
dtype: float64
- name: user_name_embd_56.1
dtype: float64
- name: user_name_embd_57.1
dtype: float64
- name: user_name_embd_58.1
dtype: float64
- name: user_name_embd_59.1
dtype: float64
- name: user_name_embd_60.1
dtype: float64
- name: user_name_embd_61.1
dtype: float64
- name: user_name_embd_62.1
dtype: float64
- name: user_name_embd_63.1
dtype: float64
- name: user_name_embd_64.1
dtype: float64
- name: user_name_embd_65.1
dtype: float64
- name: user_name_embd_66.1
dtype: float64
- name: user_name_embd_67.1
dtype: float64
- name: user_name_embd_68.1
dtype: float64
- name: user_name_embd_69.1
dtype: float64
- name: user_name_embd_70.1
dtype: float64
- name: user_name_embd_71.1
dtype: float64
- name: user_name_embd_72.1
dtype: float64
- name: user_name_embd_73.1
dtype: float64
- name: user_name_embd_74.1
dtype: float64
- name: user_name_embd_75.1
dtype: float64
- name: user_name_embd_76.1
dtype: float64
- name: user_name_embd_77.1
dtype: float64
- name: user_name_embd_78.1
dtype: float64
- name: user_name_embd_79.1
dtype: float64
- name: user_name_embd_80.1
dtype: float64
- name: user_name_embd_81.1
dtype: float64
- name: user_name_embd_82.1
dtype: float64
- name: user_name_embd_83.1
dtype: float64
- name: user_name_embd_84.1
dtype: float64
- name: user_name_embd_85.1
dtype: float64
- name: user_name_embd_86.1
dtype: float64
- name: user_name_embd_87.1
dtype: float64
- name: user_name_embd_88.1
dtype: float64
- name: user_name_embd_89.1
dtype: float64
- name: user_name_embd_90.1
dtype: float64
- name: user_name_embd_91.1
dtype: float64
- name: user_name_embd_92.1
dtype: float64
- name: user_name_embd_93.1
dtype: float64
- name: user_name_embd_94.1
dtype: float64
- name: user_name_embd_95.1
dtype: float64
- name: user_name_embd_96.1
dtype: float64
- name: user_name_embd_97.1
dtype: float64
- name: user_name_embd_98.1
dtype: float64
- name: user_name_embd_99.1
dtype: float64
- name: user_name_embd_100.1
dtype: float64
- name: user_name_embd_101.1
dtype: float64
- name: user_name_embd_102.1
dtype: float64
- name: user_name_embd_103.1
dtype: float64
- name: user_name_embd_104.1
dtype: float64
- name: user_name_embd_105.1
dtype: float64
- name: user_name_embd_106.1
dtype: float64
- name: user_name_embd_107.1
dtype: float64
- name: user_name_embd_108.1
dtype: float64
- name: user_name_embd_109.1
dtype: float64
- name: user_name_embd_110.1
dtype: float64
- name: user_name_embd_111.1
dtype: float64
- name: user_name_embd_112.1
dtype: float64
- name: user_name_embd_113.1
dtype: float64
- name: user_name_embd_114.1
dtype: float64
- name: user_name_embd_115.1
dtype: float64
- name: user_name_embd_116.1
dtype: float64
- name: user_name_embd_117.1
dtype: float64
- name: user_name_embd_118.1
dtype: float64
- name: user_name_embd_119.1
dtype: float64
- name: user_name_embd_120.1
dtype: float64
- name: user_name_embd_121.1
dtype: float64
- name: user_name_embd_122.1
dtype: float64
- name: user_name_embd_123.1
dtype: float64
- name: user_name_embd_124.1
dtype: float64
- name: user_name_embd_125.1
dtype: float64
- name: user_name_embd_126.1
dtype: float64
- name: user_name_embd_127.1
dtype: float64
- name: user_name_embd_128.1
dtype: float64
- name: user_name_embd_129.1
dtype: float64
- name: user_name_embd_130.1
dtype: float64
- name: user_name_embd_131.1
dtype: float64
- name: user_name_embd_132.1
dtype: float64
- name: user_name_embd_133.1
dtype: float64
- name: user_name_embd_134.1
dtype: float64
- name: user_name_embd_135.1
dtype: float64
- name: user_name_embd_136.1
dtype: float64
- name: user_name_embd_137.1
dtype: float64
- name: user_name_embd_138.1
dtype: float64
- name: user_name_embd_139.1
dtype: float64
- name: user_name_embd_140.1
dtype: float64
- name: user_name_embd_141.1
dtype: float64
- name: user_name_embd_142.1
dtype: float64
- name: user_name_embd_143.1
dtype: float64
- name: user_name_embd_144.1
dtype: float64
- name: user_name_embd_145.1
dtype: float64
- name: user_name_embd_146.1
dtype: float64
- name: user_name_embd_147.1
dtype: float64
- name: user_name_embd_148.1
dtype: float64
- name: user_name_embd_149.1
dtype: float64
- name: user_name_embd_150.1
dtype: float64
- name: user_name_embd_151.1
dtype: float64
- name: user_name_embd_152.1
dtype: float64
- name: user_name_embd_153.1
dtype: float64
- name: user_name_embd_154.1
dtype: float64
- name: user_name_embd_155.1
dtype: float64
- name: user_name_embd_156.1
dtype: float64
- name: user_name_embd_157.1
dtype: float64
- name: user_name_embd_158.1
dtype: float64
- name: user_name_embd_159.1
dtype: float64
- name: user_name_embd_160.1
dtype: float64
- name: user_name_embd_161.1
dtype: float64
- name: user_name_embd_162.1
dtype: float64
- name: user_name_embd_163.1
dtype: float64
- name: user_name_embd_164.1
dtype: float64
- name: user_name_embd_165.1
dtype: float64
- name: user_name_embd_166.1
dtype: float64
- name: user_name_embd_167.1
dtype: float64
- name: user_name_embd_168.1
dtype: float64
- name: user_name_embd_169.1
dtype: float64
- name: user_name_embd_170.1
dtype: float64
- name: user_name_embd_171.1
dtype: float64
- name: user_name_embd_172.1
dtype: float64
- name: user_name_embd_173.1
dtype: float64
- name: user_name_embd_174.1
dtype: float64
- name: user_name_embd_175.1
dtype: float64
- name: user_name_embd_176.1
dtype: float64
- name: user_name_embd_177.1
dtype: float64
- name: user_name_embd_178.1
dtype: float64
- name: user_name_embd_179.1
dtype: float64
- name: user_name_embd_180.1
dtype: float64
- name: user_name_embd_181.1
dtype: float64
- name: user_name_embd_182.1
dtype: float64
- name: user_name_embd_183.1
dtype: float64
- name: user_name_embd_184.1
dtype: float64
- name: user_name_embd_185.1
dtype: float64
- name: user_name_embd_186.1
dtype: float64
- name: user_name_embd_187.1
dtype: float64
- name: user_name_embd_188.1
dtype: float64
- name: user_name_embd_189.1
dtype: float64
- name: user_name_embd_190.1
dtype: float64
- name: user_name_embd_191.1
dtype: float64
- name: user_name_embd_192.1
dtype: float64
- name: user_name_embd_193.1
dtype: float64
- name: user_name_embd_194.1
dtype: float64
- name: user_name_embd_195.1
dtype: float64
- name: user_name_embd_196.1
dtype: float64
- name: user_name_embd_197.1
dtype: float64
- name: user_name_embd_198.1
dtype: float64
- name: user_name_embd_199.1
dtype: float64
- name: user_name_embd_200.1
dtype: float64
- name: user_name_embd_201.1
dtype: float64
- name: user_name_embd_202.1
dtype: float64
- name: user_name_embd_203.1
dtype: float64
- name: user_name_embd_204.1
dtype: float64
- name: user_name_embd_205.1
dtype: float64
- name: user_name_embd_206.1
dtype: float64
- name: user_name_embd_207.1
dtype: float64
- name: user_name_embd_208.1
dtype: float64
- name: user_name_embd_209.1
dtype: float64
- name: user_name_embd_210.1
dtype: float64
- name: user_name_embd_211.1
dtype: float64
- name: user_name_embd_212.1
dtype: float64
- name: user_name_embd_213.1
dtype: float64
- name: user_name_embd_214.1
dtype: float64
- name: user_name_embd_215.1
dtype: float64
- name: user_name_embd_216.1
dtype: float64
- name: user_name_embd_217.1
dtype: float64
- name: user_name_embd_218.1
dtype: float64
- name: user_name_embd_219.1
dtype: float64
- name: user_name_embd_220.1
dtype: float64
- name: user_name_embd_221.1
dtype: float64
- name: user_name_embd_222.1
dtype: float64
- name: user_name_embd_223.1
dtype: float64
- name: user_name_embd_224.1
dtype: float64
- name: user_name_embd_225.1
dtype: float64
- name: user_name_embd_226.1
dtype: float64
- name: user_name_embd_227.1
dtype: float64
- name: user_name_embd_228.1
dtype: float64
- name: user_name_embd_229.1
dtype: float64
- name: user_name_embd_230.1
dtype: float64
- name: user_name_embd_231.1
dtype: float64
- name: user_name_embd_232.1
dtype: float64
- name: user_name_embd_233.1
dtype: float64
- name: user_name_embd_234.1
dtype: float64
- name: user_name_embd_235.1
dtype: float64
- name: user_name_embd_236.1
dtype: float64
- name: user_name_embd_237.1
dtype: float64
- name: user_name_embd_238.1
dtype: float64
- name: user_name_embd_239.1
dtype: float64
- name: user_name_embd_240.1
dtype: float64
- name: user_name_embd_241.1
dtype: float64
- name: user_name_embd_242.1
dtype: float64
- name: user_name_embd_243.1
dtype: float64
- name: user_name_embd_244.1
dtype: float64
- name: user_name_embd_245.1
dtype: float64
- name: user_name_embd_246.1
dtype: float64
- name: user_name_embd_247.1
dtype: float64
- name: user_name_embd_248.1
dtype: float64
- name: user_name_embd_249.1
dtype: float64
- name: user_name_embd_250.1
dtype: float64
- name: user_name_embd_251.1
dtype: float64
- name: user_name_embd_252.1
dtype: float64
- name: user_name_embd_253.1
dtype: float64
- name: user_name_embd_254.1
dtype: float64
- name: user_name_embd_255.1
dtype: float64
- name: user_name_embd_256.1
dtype: float64
- name: user_name_embd_257.1
dtype: float64
- name: user_name_embd_258.1
dtype: float64
- name: user_name_embd_259.1
dtype: float64
- name: user_name_embd_260.1
dtype: float64
- name: user_name_embd_261.1
dtype: float64
- name: user_name_embd_262.1
dtype: float64
- name: user_name_embd_263.1
dtype: float64
- name: user_name_embd_264.1
dtype: float64
- name: user_name_embd_265.1
dtype: float64
- name: user_name_embd_266.1
dtype: float64
- name: user_name_embd_267.1
dtype: float64
- name: user_name_embd_268.1
dtype: float64
- name: user_name_embd_269.1
dtype: float64
- name: user_name_embd_270.1
dtype: float64
- name: user_name_embd_271.1
dtype: float64
- name: user_name_embd_272.1
dtype: float64
- name: user_name_embd_273.1
dtype: float64
- name: user_name_embd_274.1
dtype: float64
- name: user_name_embd_275.1
dtype: float64
- name: user_name_embd_276.1
dtype: float64
- name: user_name_embd_277.1
dtype: float64
- name: user_name_embd_278.1
dtype: float64
- name: user_name_embd_279.1
dtype: float64
- name: user_name_embd_280.1
dtype: float64
- name: user_name_embd_281.1
dtype: float64
- name: user_name_embd_282.1
dtype: float64
- name: user_name_embd_283.1
dtype: float64
- name: user_name_embd_284.1
dtype: float64
- name: user_name_embd_285.1
dtype: float64
- name: user_name_embd_286.1
dtype: float64
- name: user_name_embd_287.1
dtype: float64
- name: user_name_embd_288.1
dtype: float64
- name: user_name_embd_289.1
dtype: float64
- name: user_name_embd_290.1
dtype: float64
- name: user_name_embd_291.1
dtype: float64
- name: user_name_embd_292.1
dtype: float64
- name: user_name_embd_293.1
dtype: float64
- name: user_name_embd_294.1
dtype: float64
- name: user_name_embd_295.1
dtype: float64
- name: user_name_embd_296.1
dtype: float64
- name: user_name_embd_297.1
dtype: float64
- name: user_name_embd_298.1
dtype: float64
- name: user_name_embd_299.1
dtype: float64
- name: user_name_embd_300.1
dtype: float64
- name: user_name_embd_301.1
dtype: float64
- name: user_name_embd_302.1
dtype: float64
- name: user_name_embd_303.1
dtype: float64
- name: user_name_embd_304.1
dtype: float64
- name: user_name_embd_305.1
dtype: float64
- name: user_name_embd_306.1
dtype: float64
- name: user_name_embd_307.1
dtype: float64
- name: user_name_embd_308.1
dtype: float64
- name: user_name_embd_309.1
dtype: float64
- name: user_name_embd_310.1
dtype: float64
- name: user_name_embd_311.1
dtype: float64
- name: user_name_embd_312.1
dtype: float64
- name: user_name_embd_313.1
dtype: float64
- name: user_name_embd_314.1
dtype: float64
- name: user_name_embd_315.1
dtype: float64
- name: user_name_embd_316.1
dtype: float64
- name: user_name_embd_317.1
dtype: float64
- name: user_name_embd_318.1
dtype: float64
- name: user_name_embd_319.1
dtype: float64
- name: user_name_embd_320.1
dtype: float64
- name: user_name_embd_321.1
dtype: float64
- name: user_name_embd_322.1
dtype: float64
- name: user_name_embd_323.1
dtype: float64
- name: user_name_embd_324.1
dtype: float64
- name: user_name_embd_325.1
dtype: float64
- name: user_name_embd_326.1
dtype: float64
- name: user_name_embd_327.1
dtype: float64
- name: user_name_embd_328.1
dtype: float64
- name: user_name_embd_329.1
dtype: float64
- name: user_name_embd_330.1
dtype: float64
- name: user_name_embd_331.1
dtype: float64
- name: user_name_embd_332.1
dtype: float64
- name: user_name_embd_333.1
dtype: float64
- name: user_name_embd_334.1
dtype: float64
- name: user_name_embd_335.1
dtype: float64
- name: user_name_embd_336.1
dtype: float64
- name: user_name_embd_337.1
dtype: float64
- name: user_name_embd_338.1
dtype: float64
- name: user_name_embd_339.1
dtype: float64
- name: user_name_embd_340.1
dtype: float64
- name: user_name_embd_341.1
dtype: float64
- name: user_name_embd_342.1
dtype: float64
- name: user_name_embd_343.1
dtype: float64
- name: user_name_embd_344.1
dtype: float64
- name: user_name_embd_345.1
dtype: float64
- name: user_name_embd_346.1
dtype: float64
- name: user_name_embd_347.1
dtype: float64
- name: user_name_embd_348.1
dtype: float64
- name: user_name_embd_349.1
dtype: float64
- name: user_name_embd_350.1
dtype: float64
- name: user_name_embd_351.1
dtype: float64
- name: user_name_embd_352.1
dtype: float64
- name: user_name_embd_353.1
dtype: float64
- name: user_name_embd_354.1
dtype: float64
- name: user_name_embd_355.1
dtype: float64
- name: user_name_embd_356.1
dtype: float64
- name: user_name_embd_357.1
dtype: float64
- name: user_name_embd_358.1
dtype: float64
- name: user_name_embd_359.1
dtype: float64
- name: user_name_embd_360.1
dtype: float64
- name: user_name_embd_361.1
dtype: float64
- name: user_name_embd_362.1
dtype: float64
- name: user_name_embd_363.1
dtype: float64
- name: user_name_embd_364.1
dtype: float64
- name: user_name_embd_365.1
dtype: float64
- name: user_name_embd_366.1
dtype: float64
- name: user_name_embd_367.1
dtype: float64
- name: user_name_embd_368.1
dtype: float64
- name: user_name_embd_369.1
dtype: float64
- name: user_name_embd_370.1
dtype: float64
- name: user_name_embd_371.1
dtype: float64
- name: user_name_embd_372.1
dtype: float64
- name: user_name_embd_373.1
dtype: float64
- name: user_name_embd_374.1
dtype: float64
- name: user_name_embd_375.1
dtype: float64
- name: user_name_embd_376.1
dtype: float64
- name: user_name_embd_377.1
dtype: float64
- name: user_name_embd_378.1
dtype: float64
- name: user_name_embd_379.1
dtype: float64
- name: user_name_embd_380.1
dtype: float64
- name: user_name_embd_381.1
dtype: float64
- name: user_name_embd_382.1
dtype: float64
- name: user_name_embd_383.1
dtype: float64
- name: bot_name_embd_0.1
dtype: float64
- name: bot_name_embd_1.1
dtype: float64
- name: bot_name_embd_2.1
dtype: float64
- name: bot_name_embd_3.1
dtype: float64
- name: bot_name_embd_4.1
dtype: float64
- name: bot_name_embd_5.1
dtype: float64
- name: bot_name_embd_6.1
dtype: float64
- name: bot_name_embd_7.1
dtype: float64
- name: bot_name_embd_8.1
dtype: float64
- name: bot_name_embd_9.1
dtype: float64
- name: bot_name_embd_10.1
dtype: float64
- name: bot_name_embd_11.1
dtype: float64
- name: bot_name_embd_12.1
dtype: float64
- name: bot_name_embd_13.1
dtype: float64
- name: bot_name_embd_14.1
dtype: float64
- name: bot_name_embd_15.1
dtype: float64
- name: bot_name_embd_16.1
dtype: float64
- name: bot_name_embd_17.1
dtype: float64
- name: bot_name_embd_18.1
dtype: float64
- name: bot_name_embd_19.1
dtype: float64
- name: bot_name_embd_20.1
dtype: float64
- name: bot_name_embd_21.1
dtype: float64
- name: bot_name_embd_22.1
dtype: float64
- name: bot_name_embd_23.1
dtype: float64
- name: bot_name_embd_24.1
dtype: float64
- name: bot_name_embd_25.1
dtype: float64
- name: bot_name_embd_26.1
dtype: float64
- name: bot_name_embd_27.1
dtype: float64
- name: bot_name_embd_28.1
dtype: float64
- name: bot_name_embd_29.1
dtype: float64
- name: bot_name_embd_30.1
dtype: float64
- name: bot_name_embd_31.1
dtype: float64
- name: bot_name_embd_32.1
dtype: float64
- name: bot_name_embd_33.1
dtype: float64
- name: bot_name_embd_34.1
dtype: float64
- name: bot_name_embd_35.1
dtype: float64
- name: bot_name_embd_36.1
dtype: float64
- name: bot_name_embd_37.1
dtype: float64
- name: bot_name_embd_38.1
dtype: float64
- name: bot_name_embd_39.1
dtype: float64
- name: bot_name_embd_40.1
dtype: float64
- name: bot_name_embd_41.1
dtype: float64
- name: bot_name_embd_42.1
dtype: float64
- name: bot_name_embd_43.1
dtype: float64
- name: bot_name_embd_44.1
dtype: float64
- name: bot_name_embd_45.1
dtype: float64
- name: bot_name_embd_46.1
dtype: float64
- name: bot_name_embd_47.1
dtype: float64
- name: bot_name_embd_48.1
dtype: float64
- name: bot_name_embd_49.1
dtype: float64
- name: bot_name_embd_50.1
dtype: float64
- name: bot_name_embd_51.1
dtype: float64
- name: bot_name_embd_52.1
dtype: float64
- name: bot_name_embd_53.1
dtype: float64
- name: bot_name_embd_54.1
dtype: float64
- name: bot_name_embd_55.1
dtype: float64
- name: bot_name_embd_56.1
dtype: float64
- name: bot_name_embd_57.1
dtype: float64
- name: bot_name_embd_58.1
dtype: float64
- name: bot_name_embd_59.1
dtype: float64
- name: bot_name_embd_60.1
dtype: float64
- name: bot_name_embd_61.1
dtype: float64
- name: bot_name_embd_62.1
dtype: float64
- name: bot_name_embd_63.1
dtype: float64
- name: bot_name_embd_64.1
dtype: float64
- name: bot_name_embd_65.1
dtype: float64
- name: bot_name_embd_66.1
dtype: float64
- name: bot_name_embd_67.1
dtype: float64
- name: bot_name_embd_68.1
dtype: float64
- name: bot_name_embd_69.1
dtype: float64
- name: bot_name_embd_70.1
dtype: float64
- name: bot_name_embd_71.1
dtype: float64
- name: bot_name_embd_72.1
dtype: float64
- name: bot_name_embd_73.1
dtype: float64
- name: bot_name_embd_74.1
dtype: float64
- name: bot_name_embd_75.1
dtype: float64
- name: bot_name_embd_76.1
dtype: float64
- name: bot_name_embd_77.1
dtype: float64
- name: bot_name_embd_78.1
dtype: float64
- name: bot_name_embd_79.1
dtype: float64
- name: bot_name_embd_80.1
dtype: float64
- name: bot_name_embd_81.1
dtype: float64
- name: bot_name_embd_82.1
dtype: float64
- name: bot_name_embd_83.1
dtype: float64
- name: bot_name_embd_84.1
dtype: float64
- name: bot_name_embd_85.1
dtype: float64
- name: bot_name_embd_86.1
dtype: float64
- name: bot_name_embd_87.1
dtype: float64
- name: bot_name_embd_88.1
dtype: float64
- name: bot_name_embd_89.1
dtype: float64
- name: bot_name_embd_90.1
dtype: float64
- name: bot_name_embd_91.1
dtype: float64
- name: bot_name_embd_92.1
dtype: float64
- name: bot_name_embd_93.1
dtype: float64
- name: bot_name_embd_94.1
dtype: float64
- name: bot_name_embd_95.1
dtype: float64
- name: bot_name_embd_96.1
dtype: float64
- name: bot_name_embd_97.1
dtype: float64
- name: bot_name_embd_98.1
dtype: float64
- name: bot_name_embd_99.1
dtype: float64
- name: bot_name_embd_100.1
dtype: float64
- name: bot_name_embd_101.1
dtype: float64
- name: bot_name_embd_102.1
dtype: float64
- name: bot_name_embd_103.1
dtype: float64
- name: bot_name_embd_104.1
dtype: float64
- name: bot_name_embd_105.1
dtype: float64
- name: bot_name_embd_106.1
dtype: float64
- name: bot_name_embd_107.1
dtype: float64
- name: bot_name_embd_108.1
dtype: float64
- name: bot_name_embd_109.1
dtype: float64
- name: bot_name_embd_110.1
dtype: float64
- name: bot_name_embd_111.1
dtype: float64
- name: bot_name_embd_112.1
dtype: float64
- name: bot_name_embd_113.1
dtype: float64
- name: bot_name_embd_114.1
dtype: float64
- name: bot_name_embd_115.1
dtype: float64
- name: bot_name_embd_116.1
dtype: float64
- name: bot_name_embd_117.1
dtype: float64
- name: bot_name_embd_118.1
dtype: float64
- name: bot_name_embd_119.1
dtype: float64
- name: bot_name_embd_120.1
dtype: float64
- name: bot_name_embd_121.1
dtype: float64
- name: bot_name_embd_122.1
dtype: float64
- name: bot_name_embd_123.1
dtype: float64
- name: bot_name_embd_124.1
dtype: float64
- name: bot_name_embd_125.1
dtype: float64
- name: bot_name_embd_126.1
dtype: float64
- name: bot_name_embd_127.1
dtype: float64
- name: bot_name_embd_128.1
dtype: float64
- name: bot_name_embd_129.1
dtype: float64
- name: bot_name_embd_130.1
dtype: float64
- name: bot_name_embd_131.1
dtype: float64
- name: bot_name_embd_132.1
dtype: float64
- name: bot_name_embd_133.1
dtype: float64
- name: bot_name_embd_134.1
dtype: float64
- name: bot_name_embd_135.1
dtype: float64
- name: bot_name_embd_136.1
dtype: float64
- name: bot_name_embd_137.1
dtype: float64
- name: bot_name_embd_138.1
dtype: float64
- name: bot_name_embd_139.1
dtype: float64
- name: bot_name_embd_140.1
dtype: float64
- name: bot_name_embd_141.1
dtype: float64
- name: bot_name_embd_142.1
dtype: float64
- name: bot_name_embd_143.1
dtype: float64
- name: bot_name_embd_144.1
dtype: float64
- name: bot_name_embd_145.1
dtype: float64
- name: bot_name_embd_146.1
dtype: float64
- name: bot_name_embd_147.1
dtype: float64
- name: bot_name_embd_148.1
dtype: float64
- name: bot_name_embd_149.1
dtype: float64
- name: bot_name_embd_150.1
dtype: float64
- name: bot_name_embd_151.1
dtype: float64
- name: bot_name_embd_152.1
dtype: float64
- name: bot_name_embd_153.1
dtype: float64
- name: bot_name_embd_154.1
dtype: float64
- name: bot_name_embd_155.1
dtype: float64
- name: bot_name_embd_156.1
dtype: float64
- name: bot_name_embd_157.1
dtype: float64
- name: bot_name_embd_158.1
dtype: float64
- name: bot_name_embd_159.1
dtype: float64
- name: bot_name_embd_160.1
dtype: float64
- name: bot_name_embd_161.1
dtype: float64
- name: bot_name_embd_162.1
dtype: float64
- name: bot_name_embd_163.1
dtype: float64
- name: bot_name_embd_164.1
dtype: float64
- name: bot_name_embd_165.1
dtype: float64
- name: bot_name_embd_166.1
dtype: float64
- name: bot_name_embd_167.1
dtype: float64
- name: bot_name_embd_168.1
dtype: float64
- name: bot_name_embd_169.1
dtype: float64
- name: bot_name_embd_170.1
dtype: float64
- name: bot_name_embd_171.1
dtype: float64
- name: bot_name_embd_172.1
dtype: float64
- name: bot_name_embd_173.1
dtype: float64
- name: bot_name_embd_174.1
dtype: float64
- name: bot_name_embd_175.1
dtype: float64
- name: bot_name_embd_176.1
dtype: float64
- name: bot_name_embd_177.1
dtype: float64
- name: bot_name_embd_178.1
dtype: float64
- name: bot_name_embd_179.1
dtype: float64
- name: bot_name_embd_180.1
dtype: float64
- name: bot_name_embd_181.1
dtype: float64
- name: bot_name_embd_182.1
dtype: float64
- name: bot_name_embd_183.1
dtype: float64
- name: bot_name_embd_184.1
dtype: float64
- name: bot_name_embd_185.1
dtype: float64
- name: bot_name_embd_186.1
dtype: float64
- name: bot_name_embd_187.1
dtype: float64
- name: bot_name_embd_188.1
dtype: float64
- name: bot_name_embd_189.1
dtype: float64
- name: bot_name_embd_190.1
dtype: float64
- name: bot_name_embd_191.1
dtype: float64
- name: bot_name_embd_192.1
dtype: float64
- name: bot_name_embd_193.1
dtype: float64
- name: bot_name_embd_194.1
dtype: float64
- name: bot_name_embd_195.1
dtype: float64
- name: bot_name_embd_196.1
dtype: float64
- name: bot_name_embd_197.1
dtype: float64
- name: bot_name_embd_198.1
dtype: float64
- name: bot_name_embd_199.1
dtype: float64
- name: bot_name_embd_200.1
dtype: float64
- name: bot_name_embd_201.1
dtype: float64
- name: bot_name_embd_202.1
dtype: float64
- name: bot_name_embd_203.1
dtype: float64
- name: bot_name_embd_204.1
dtype: float64
- name: bot_name_embd_205.1
dtype: float64
- name: bot_name_embd_206.1
dtype: float64
- name: bot_name_embd_207.1
dtype: float64
- name: bot_name_embd_208.1
dtype: float64
- name: bot_name_embd_209.1
dtype: float64
- name: bot_name_embd_210.1
dtype: float64
- name: bot_name_embd_211.1
dtype: float64
- name: bot_name_embd_212.1
dtype: float64
- name: bot_name_embd_213.1
dtype: float64
- name: bot_name_embd_214.1
dtype: float64
- name: bot_name_embd_215.1
dtype: float64
- name: bot_name_embd_216.1
dtype: float64
- name: bot_name_embd_217.1
dtype: float64
- name: bot_name_embd_218.1
dtype: float64
- name: bot_name_embd_219.1
dtype: float64
- name: bot_name_embd_220.1
dtype: float64
- name: bot_name_embd_221.1
dtype: float64
- name: bot_name_embd_222.1
dtype: float64
- name: bot_name_embd_223.1
dtype: float64
- name: bot_name_embd_224.1
dtype: float64
- name: bot_name_embd_225.1
dtype: float64
- name: bot_name_embd_226.1
dtype: float64
- name: bot_name_embd_227.1
dtype: float64
- name: bot_name_embd_228.1
dtype: float64
- name: bot_name_embd_229.1
dtype: float64
- name: bot_name_embd_230.1
dtype: float64
- name: bot_name_embd_231.1
dtype: float64
- name: bot_name_embd_232.1
dtype: float64
- name: bot_name_embd_233.1
dtype: float64
- name: bot_name_embd_234.1
dtype: float64
- name: bot_name_embd_235.1
dtype: float64
- name: bot_name_embd_236.1
dtype: float64
- name: bot_name_embd_237.1
dtype: float64
- name: bot_name_embd_238.1
dtype: float64
- name: bot_name_embd_239.1
dtype: float64
- name: bot_name_embd_240.1
dtype: float64
- name: bot_name_embd_241.1
dtype: float64
- name: bot_name_embd_242.1
dtype: float64
- name: bot_name_embd_243.1
dtype: float64
- name: bot_name_embd_244.1
dtype: float64
- name: bot_name_embd_245.1
dtype: float64
- name: bot_name_embd_246.1
dtype: float64
- name: bot_name_embd_247.1
dtype: float64
- name: bot_name_embd_248.1
dtype: float64
- name: bot_name_embd_249.1
dtype: float64
- name: bot_name_embd_250.1
dtype: float64
- name: bot_name_embd_251.1
dtype: float64
- name: bot_name_embd_252.1
dtype: float64
- name: bot_name_embd_253.1
dtype: float64
- name: bot_name_embd_254.1
dtype: float64
- name: bot_name_embd_255.1
dtype: float64
- name: bot_name_embd_256.1
dtype: float64
- name: bot_name_embd_257.1
dtype: float64
- name: bot_name_embd_258.1
dtype: float64
- name: bot_name_embd_259.1
dtype: float64
- name: bot_name_embd_260.1
dtype: float64
- name: bot_name_embd_261.1
dtype: float64
- name: bot_name_embd_262.1
dtype: float64
- name: bot_name_embd_263.1
dtype: float64
- name: bot_name_embd_264.1
dtype: float64
- name: bot_name_embd_265.1
dtype: float64
- name: bot_name_embd_266.1
dtype: float64
- name: bot_name_embd_267.1
dtype: float64
- name: bot_name_embd_268.1
dtype: float64
- name: bot_name_embd_269.1
dtype: float64
- name: bot_name_embd_270.1
dtype: float64
- name: bot_name_embd_271.1
dtype: float64
- name: bot_name_embd_272.1
dtype: float64
- name: bot_name_embd_273.1
dtype: float64
- name: bot_name_embd_274.1
dtype: float64
- name: bot_name_embd_275.1
dtype: float64
- name: bot_name_embd_276.1
dtype: float64
- name: bot_name_embd_277.1
dtype: float64
- name: bot_name_embd_278.1
dtype: float64
- name: bot_name_embd_279.1
dtype: float64
- name: bot_name_embd_280.1
dtype: float64
- name: bot_name_embd_281.1
dtype: float64
- name: bot_name_embd_282.1
dtype: float64
- name: bot_name_embd_283.1
dtype: float64
- name: bot_name_embd_284.1
dtype: float64
- name: bot_name_embd_285.1
dtype: float64
- name: bot_name_embd_286.1
dtype: float64
- name: bot_name_embd_287.1
dtype: float64
- name: bot_name_embd_288.1
dtype: float64
- name: bot_name_embd_289.1
dtype: float64
- name: bot_name_embd_290.1
dtype: float64
- name: bot_name_embd_291.1
dtype: float64
- name: bot_name_embd_292.1
dtype: float64
- name: bot_name_embd_293.1
dtype: float64
- name: bot_name_embd_294.1
dtype: float64
- name: bot_name_embd_295.1
dtype: float64
- name: bot_name_embd_296.1
dtype: float64
- name: bot_name_embd_297.1
dtype: float64
- name: bot_name_embd_298.1
dtype: float64
- name: bot_name_embd_299.1
dtype: float64
- name: bot_name_embd_300.1
dtype: float64
- name: bot_name_embd_301.1
dtype: float64
- name: bot_name_embd_302.1
dtype: float64
- name: bot_name_embd_303.1
dtype: float64
- name: bot_name_embd_304.1
dtype: float64
- name: bot_name_embd_305.1
dtype: float64
- name: bot_name_embd_306.1
dtype: float64
- name: bot_name_embd_307.1
dtype: float64
- name: bot_name_embd_308.1
dtype: float64
- name: bot_name_embd_309.1
dtype: float64
- name: bot_name_embd_310.1
dtype: float64
- name: bot_name_embd_311.1
dtype: float64
- name: bot_name_embd_312.1
dtype: float64
- name: bot_name_embd_313.1
dtype: float64
- name: bot_name_embd_314.1
dtype: float64
- name: bot_name_embd_315.1
dtype: float64
- name: bot_name_embd_316.1
dtype: float64
- name: bot_name_embd_317.1
dtype: float64
- name: bot_name_embd_318.1
dtype: float64
- name: bot_name_embd_319.1
dtype: float64
- name: bot_name_embd_320.1
dtype: float64
- name: bot_name_embd_321.1
dtype: float64
- name: bot_name_embd_322.1
dtype: float64
- name: bot_name_embd_323.1
dtype: float64
- name: bot_name_embd_324.1
dtype: float64
- name: bot_name_embd_325.1
dtype: float64
- name: bot_name_embd_326.1
dtype: float64
- name: bot_name_embd_327.1
dtype: float64
- name: bot_name_embd_328.1
dtype: float64
- name: bot_name_embd_329.1
dtype: float64
- name: bot_name_embd_330.1
dtype: float64
- name: bot_name_embd_331.1
dtype: float64
- name: bot_name_embd_332.1
dtype: float64
- name: bot_name_embd_333.1
dtype: float64
- name: bot_name_embd_334.1
dtype: float64
- name: bot_name_embd_335.1
dtype: float64
- name: bot_name_embd_336.1
dtype: float64
- name: bot_name_embd_337.1
dtype: float64
- name: bot_name_embd_338.1
dtype: float64
- name: bot_name_embd_339.1
dtype: float64
- name: bot_name_embd_340.1
dtype: float64
- name: bot_name_embd_341.1
dtype: float64
- name: bot_name_embd_342.1
dtype: float64
- name: bot_name_embd_343.1
dtype: float64
- name: bot_name_embd_344.1
dtype: float64
- name: bot_name_embd_345.1
dtype: float64
- name: bot_name_embd_346.1
dtype: float64
- name: bot_name_embd_347.1
dtype: float64
- name: bot_name_embd_348.1
dtype: float64
- name: bot_name_embd_349.1
dtype: float64
- name: bot_name_embd_350.1
dtype: float64
- name: bot_name_embd_351.1
dtype: float64
- name: bot_name_embd_352.1
dtype: float64
- name: bot_name_embd_353.1
dtype: float64
- name: bot_name_embd_354.1
dtype: float64
- name: bot_name_embd_355.1
dtype: float64
- name: bot_name_embd_356.1
dtype: float64
- name: bot_name_embd_357.1
dtype: float64
- name: bot_name_embd_358.1
dtype: float64
- name: bot_name_embd_359.1
dtype: float64
- name: bot_name_embd_360.1
dtype: float64
- name: bot_name_embd_361.1
dtype: float64
- name: bot_name_embd_362.1
dtype: float64
- name: bot_name_embd_363.1
dtype: float64
- name: bot_name_embd_364.1
dtype: float64
- name: bot_name_embd_365.1
dtype: float64
- name: bot_name_embd_366.1
dtype: float64
- name: bot_name_embd_367.1
dtype: float64
- name: bot_name_embd_368.1
dtype: float64
- name: bot_name_embd_369.1
dtype: float64
- name: bot_name_embd_370.1
dtype: float64
- name: bot_name_embd_371.1
dtype: float64
- name: bot_name_embd_372.1
dtype: float64
- name: bot_name_embd_373.1
dtype: float64
- name: bot_name_embd_374.1
dtype: float64
- name: bot_name_embd_375.1
dtype: float64
- name: bot_name_embd_376.1
dtype: float64
- name: bot_name_embd_377.1
dtype: float64
- name: bot_name_embd_378.1
dtype: float64
- name: bot_name_embd_379.1
dtype: float64
- name: bot_name_embd_380.1
dtype: float64
- name: bot_name_embd_381.1
dtype: float64
- name: bot_name_embd_382.1
dtype: float64
- name: bot_name_embd_383.1
dtype: float64
- name: bot_persona_embd_0.1
dtype: float64
- name: bot_persona_embd_1.1
dtype: float64
- name: bot_persona_embd_2.1
dtype: float64
- name: bot_persona_embd_3.1
dtype: float64
- name: bot_persona_embd_4.1
dtype: float64
- name: bot_persona_embd_5.1
dtype: float64
- name: bot_persona_embd_6.1
dtype: float64
- name: bot_persona_embd_7.1
dtype: float64
- name: bot_persona_embd_8.1
dtype: float64
- name: bot_persona_embd_9.1
dtype: float64
- name: bot_persona_embd_10.1
dtype: float64
- name: bot_persona_embd_11.1
dtype: float64
- name: bot_persona_embd_12.1
dtype: float64
- name: bot_persona_embd_13.1
dtype: float64
- name: bot_persona_embd_14.1
dtype: float64
- name: bot_persona_embd_15.1
dtype: float64
- name: bot_persona_embd_16.1
dtype: float64
- name: bot_persona_embd_17.1
dtype: float64
- name: bot_persona_embd_18.1
dtype: float64
- name: bot_persona_embd_19.1
dtype: float64
- name: bot_persona_embd_20.1
dtype: float64
- name: bot_persona_embd_21.1
dtype: float64
- name: bot_persona_embd_22.1
dtype: float64
- name: bot_persona_embd_23.1
dtype: float64
- name: bot_persona_embd_24.1
dtype: float64
- name: bot_persona_embd_25.1
dtype: float64
- name: bot_persona_embd_26.1
dtype: float64
- name: bot_persona_embd_27.1
dtype: float64
- name: bot_persona_embd_28.1
dtype: float64
- name: bot_persona_embd_29.1
dtype: float64
- name: bot_persona_embd_30.1
dtype: float64
- name: bot_persona_embd_31.1
dtype: float64
- name: bot_persona_embd_32.1
dtype: float64
- name: bot_persona_embd_33.1
dtype: float64
- name: bot_persona_embd_34.1
dtype: float64
- name: bot_persona_embd_35.1
dtype: float64
- name: bot_persona_embd_36.1
dtype: float64
- name: bot_persona_embd_37.1
dtype: float64
- name: bot_persona_embd_38.1
dtype: float64
- name: bot_persona_embd_39.1
dtype: float64
- name: bot_persona_embd_40.1
dtype: float64
- name: bot_persona_embd_41.1
dtype: float64
- name: bot_persona_embd_42.1
dtype: float64
- name: bot_persona_embd_43.1
dtype: float64
- name: bot_persona_embd_44.1
dtype: float64
- name: bot_persona_embd_45.1
dtype: float64
- name: bot_persona_embd_46.1
dtype: float64
- name: bot_persona_embd_47.1
dtype: float64
- name: bot_persona_embd_48.1
dtype: float64
- name: bot_persona_embd_49.1
dtype: float64
- name: bot_persona_embd_50.1
dtype: float64
- name: bot_persona_embd_51.1
dtype: float64
- name: bot_persona_embd_52.1
dtype: float64
- name: bot_persona_embd_53.1
dtype: float64
- name: bot_persona_embd_54.1
dtype: float64
- name: bot_persona_embd_55.1
dtype: float64
- name: bot_persona_embd_56.1
dtype: float64
- name: bot_persona_embd_57.1
dtype: float64
- name: bot_persona_embd_58.1
dtype: float64
- name: bot_persona_embd_59.1
dtype: float64
- name: bot_persona_embd_60.1
dtype: float64
- name: bot_persona_embd_61.1
dtype: float64
- name: bot_persona_embd_62.1
dtype: float64
- name: bot_persona_embd_63.1
dtype: float64
- name: bot_persona_embd_64.1
dtype: float64
- name: bot_persona_embd_65.1
dtype: float64
- name: bot_persona_embd_66.1
dtype: float64
- name: bot_persona_embd_67.1
dtype: float64
- name: bot_persona_embd_68.1
dtype: float64
- name: bot_persona_embd_69.1
dtype: float64
- name: bot_persona_embd_70.1
dtype: float64
- name: bot_persona_embd_71.1
dtype: float64
- name: bot_persona_embd_72.1
dtype: float64
- name: bot_persona_embd_73.1
dtype: float64
- name: bot_persona_embd_74.1
dtype: float64
- name: bot_persona_embd_75.1
dtype: float64
- name: bot_persona_embd_76.1
dtype: float64
- name: bot_persona_embd_77.1
dtype: float64
- name: bot_persona_embd_78.1
dtype: float64
- name: bot_persona_embd_79.1
dtype: float64
- name: bot_persona_embd_80.1
dtype: float64
- name: bot_persona_embd_81.1
dtype: float64
- name: bot_persona_embd_82.1
dtype: float64
- name: bot_persona_embd_83.1
dtype: float64
- name: bot_persona_embd_84.1
dtype: float64
- name: bot_persona_embd_85.1
dtype: float64
- name: bot_persona_embd_86.1
dtype: float64
- name: bot_persona_embd_87.1
dtype: float64
- name: bot_persona_embd_88.1
dtype: float64
- name: bot_persona_embd_89.1
dtype: float64
- name: bot_persona_embd_90.1
dtype: float64
- name: bot_persona_embd_91.1
dtype: float64
- name: bot_persona_embd_92.1
dtype: float64
- name: bot_persona_embd_93.1
dtype: float64
- name: bot_persona_embd_94.1
dtype: float64
- name: bot_persona_embd_95.1
dtype: float64
- name: bot_persona_embd_96.1
dtype: float64
- name: bot_persona_embd_97.1
dtype: float64
- name: bot_persona_embd_98.1
dtype: float64
- name: bot_persona_embd_99.1
dtype: float64
- name: bot_persona_embd_100.1
dtype: float64
- name: bot_persona_embd_101.1
dtype: float64
- name: bot_persona_embd_102.1
dtype: float64
- name: bot_persona_embd_103.1
dtype: float64
- name: bot_persona_embd_104.1
dtype: float64
- name: bot_persona_embd_105.1
dtype: float64
- name: bot_persona_embd_106.1
dtype: float64
- name: bot_persona_embd_107.1
dtype: float64
- name: bot_persona_embd_108.1
dtype: float64
- name: bot_persona_embd_109.1
dtype: float64
- name: bot_persona_embd_110.1
dtype: float64
- name: bot_persona_embd_111.1
dtype: float64
- name: bot_persona_embd_112.1
dtype: float64
- name: bot_persona_embd_113.1
dtype: float64
- name: bot_persona_embd_114.1
dtype: float64
- name: bot_persona_embd_115.1
dtype: float64
- name: bot_persona_embd_116.1
dtype: float64
- name: bot_persona_embd_117.1
dtype: float64
- name: bot_persona_embd_118.1
dtype: float64
- name: bot_persona_embd_119.1
dtype: float64
- name: bot_persona_embd_120.1
dtype: float64
- name: bot_persona_embd_121.1
dtype: float64
- name: bot_persona_embd_122.1
dtype: float64
- name: bot_persona_embd_123.1
dtype: float64
- name: bot_persona_embd_124.1
dtype: float64
- name: bot_persona_embd_125.1
dtype: float64
- name: bot_persona_embd_126.1
dtype: float64
- name: bot_persona_embd_127.1
dtype: float64
- name: bot_persona_embd_128.1
dtype: float64
- name: bot_persona_embd_129.1
dtype: float64
- name: bot_persona_embd_130.1
dtype: float64
- name: bot_persona_embd_131.1
dtype: float64
- name: bot_persona_embd_132.1
dtype: float64
- name: bot_persona_embd_133.1
dtype: float64
- name: bot_persona_embd_134.1
dtype: float64
- name: bot_persona_embd_135.1
dtype: float64
- name: bot_persona_embd_136.1
dtype: float64
- name: bot_persona_embd_137.1
dtype: float64
- name: bot_persona_embd_138.1
dtype: float64
- name: bot_persona_embd_139.1
dtype: float64
- name: bot_persona_embd_140.1
dtype: float64
- name: bot_persona_embd_141.1
dtype: float64
- name: bot_persona_embd_142.1
dtype: float64
- name: bot_persona_embd_143.1
dtype: float64
- name: bot_persona_embd_144.1
dtype: float64
- name: bot_persona_embd_145.1
dtype: float64
- name: bot_persona_embd_146.1
dtype: float64
- name: bot_persona_embd_147.1
dtype: float64
- name: bot_persona_embd_148.1
dtype: float64
- name: bot_persona_embd_149.1
dtype: float64
- name: bot_persona_embd_150.1
dtype: float64
- name: bot_persona_embd_151.1
dtype: float64
- name: bot_persona_embd_152.1
dtype: float64
- name: bot_persona_embd_153.1
dtype: float64
- name: bot_persona_embd_154.1
dtype: float64
- name: bot_persona_embd_155.1
dtype: float64
- name: bot_persona_embd_156.1
dtype: float64
- name: bot_persona_embd_157.1
dtype: float64
- name: bot_persona_embd_158.1
dtype: float64
- name: bot_persona_embd_159.1
dtype: float64
- name: bot_persona_embd_160.1
dtype: float64
- name: bot_persona_embd_161.1
dtype: float64
- name: bot_persona_embd_162.1
dtype: float64
- name: bot_persona_embd_163.1
dtype: float64
- name: bot_persona_embd_164.1
dtype: float64
- name: bot_persona_embd_165.1
dtype: float64
- name: bot_persona_embd_166.1
dtype: float64
- name: bot_persona_embd_167.1
dtype: float64
- name: bot_persona_embd_168.1
dtype: float64
- name: bot_persona_embd_169.1
dtype: float64
- name: bot_persona_embd_170.1
dtype: float64
- name: bot_persona_embd_171.1
dtype: float64
- name: bot_persona_embd_172.1
dtype: float64
- name: bot_persona_embd_173.1
dtype: float64
- name: bot_persona_embd_174.1
dtype: float64
- name: bot_persona_embd_175.1
dtype: float64
- name: bot_persona_embd_176.1
dtype: float64
- name: bot_persona_embd_177.1
dtype: float64
- name: bot_persona_embd_178.1
dtype: float64
- name: bot_persona_embd_179.1
dtype: float64
- name: bot_persona_embd_180.1
dtype: float64
- name: bot_persona_embd_181.1
dtype: float64
- name: bot_persona_embd_182.1
dtype: float64
- name: bot_persona_embd_183.1
dtype: float64
- name: bot_persona_embd_184.1
dtype: float64
- name: bot_persona_embd_185.1
dtype: float64
- name: bot_persona_embd_186.1
dtype: float64
- name: bot_persona_embd_187.1
dtype: float64
- name: bot_persona_embd_188.1
dtype: float64
- name: bot_persona_embd_189.1
dtype: float64
- name: bot_persona_embd_190.1
dtype: float64
- name: bot_persona_embd_191.1
dtype: float64
- name: bot_persona_embd_192.1
dtype: float64
- name: bot_persona_embd_193.1
dtype: float64
- name: bot_persona_embd_194.1
dtype: float64
- name: bot_persona_embd_195.1
dtype: float64
- name: bot_persona_embd_196.1
dtype: float64
- name: bot_persona_embd_197.1
dtype: float64
- name: bot_persona_embd_198.1
dtype: float64
- name: bot_persona_embd_199.1
dtype: float64
- name: bot_persona_embd_200.1
dtype: float64
- name: bot_persona_embd_201.1
dtype: float64
- name: bot_persona_embd_202.1
dtype: float64
- name: bot_persona_embd_203.1
dtype: float64
- name: bot_persona_embd_204.1
dtype: float64
- name: bot_persona_embd_205.1
dtype: float64
- name: bot_persona_embd_206.1
dtype: float64
- name: bot_persona_embd_207.1
dtype: float64
- name: bot_persona_embd_208.1
dtype: float64
- name: bot_persona_embd_209.1
dtype: float64
- name: bot_persona_embd_210.1
dtype: float64
- name: bot_persona_embd_211.1
dtype: float64
- name: bot_persona_embd_212.1
dtype: float64
- name: bot_persona_embd_213.1
dtype: float64
- name: bot_persona_embd_214.1
dtype: float64
- name: bot_persona_embd_215.1
dtype: float64
- name: bot_persona_embd_216.1
dtype: float64
- name: bot_persona_embd_217.1
dtype: float64
- name: bot_persona_embd_218.1
dtype: float64
- name: bot_persona_embd_219.1
dtype: float64
- name: bot_persona_embd_220.1
dtype: float64
- name: bot_persona_embd_221.1
dtype: float64
- name: bot_persona_embd_222.1
dtype: float64
- name: bot_persona_embd_223.1
dtype: float64
- name: bot_persona_embd_224.1
dtype: float64
- name: bot_persona_embd_225.1
dtype: float64
- name: bot_persona_embd_226.1
dtype: float64
- name: bot_persona_embd_227.1
dtype: float64
- name: bot_persona_embd_228.1
dtype: float64
- name: bot_persona_embd_229.1
dtype: float64
- name: bot_persona_embd_230.1
dtype: float64
- name: bot_persona_embd_231.1
dtype: float64
- name: bot_persona_embd_232.1
dtype: float64
- name: bot_persona_embd_233.1
dtype: float64
- name: bot_persona_embd_234.1
dtype: float64
- name: bot_persona_embd_235.1
dtype: float64
- name: bot_persona_embd_236.1
dtype: float64
- name: bot_persona_embd_237.1
dtype: float64
- name: bot_persona_embd_238.1
dtype: float64
- name: bot_persona_embd_239.1
dtype: float64
- name: bot_persona_embd_240.1
dtype: float64
- name: bot_persona_embd_241.1
dtype: float64
- name: bot_persona_embd_242.1
dtype: float64
- name: bot_persona_embd_243.1
dtype: float64
- name: bot_persona_embd_244.1
dtype: float64
- name: bot_persona_embd_245.1
dtype: float64
- name: bot_persona_embd_246.1
dtype: float64
- name: bot_persona_embd_247.1
dtype: float64
- name: bot_persona_embd_248.1
dtype: float64
- name: bot_persona_embd_249.1
dtype: float64
- name: bot_persona_embd_250.1
dtype: float64
- name: bot_persona_embd_251.1
dtype: float64
- name: bot_persona_embd_252.1
dtype: float64
- name: bot_persona_embd_253.1
dtype: float64
- name: bot_persona_embd_254.1
dtype: float64
- name: bot_persona_embd_255.1
dtype: float64
- name: bot_persona_embd_256.1
dtype: float64
- name: bot_persona_embd_257.1
dtype: float64
- name: bot_persona_embd_258.1
dtype: float64
- name: bot_persona_embd_259.1
dtype: float64
- name: bot_persona_embd_260.1
dtype: float64
- name: bot_persona_embd_261.1
dtype: float64
- name: bot_persona_embd_262.1
dtype: float64
- name: bot_persona_embd_263.1
dtype: float64
- name: bot_persona_embd_264.1
dtype: float64
- name: bot_persona_embd_265.1
dtype: float64
- name: bot_persona_embd_266.1
dtype: float64
- name: bot_persona_embd_267.1
dtype: float64
- name: bot_persona_embd_268.1
dtype: float64
- name: bot_persona_embd_269.1
dtype: float64
- name: bot_persona_embd_270.1
dtype: float64
- name: bot_persona_embd_271.1
dtype: float64
- name: bot_persona_embd_272.1
dtype: float64
- name: bot_persona_embd_273.1
dtype: float64
- name: bot_persona_embd_274.1
dtype: float64
- name: bot_persona_embd_275.1
dtype: float64
- name: bot_persona_embd_276.1
dtype: float64
- name: bot_persona_embd_277.1
dtype: float64
- name: bot_persona_embd_278.1
dtype: float64
- name: bot_persona_embd_279.1
dtype: float64
- name: bot_persona_embd_280.1
dtype: float64
- name: bot_persona_embd_281.1
dtype: float64
- name: bot_persona_embd_282.1
dtype: float64
- name: bot_persona_embd_283.1
dtype: float64
- name: bot_persona_embd_284.1
dtype: float64
- name: bot_persona_embd_285.1
dtype: float64
- name: bot_persona_embd_286.1
dtype: float64
- name: bot_persona_embd_287.1
dtype: float64
- name: bot_persona_embd_288.1
dtype: float64
- name: bot_persona_embd_289.1
dtype: float64
- name: bot_persona_embd_290.1
dtype: float64
- name: bot_persona_embd_291.1
dtype: float64
- name: bot_persona_embd_292.1
dtype: float64
- name: bot_persona_embd_293.1
dtype: float64
- name: bot_persona_embd_294.1
dtype: float64
- name: bot_persona_embd_295.1
dtype: float64
- name: bot_persona_embd_296.1
dtype: float64
- name: bot_persona_embd_297.1
dtype: float64
- name: bot_persona_embd_298.1
dtype: float64
- name: bot_persona_embd_299.1
dtype: float64
- name: bot_persona_embd_300.1
dtype: float64
- name: bot_persona_embd_301.1
dtype: float64
- name: bot_persona_embd_302.1
dtype: float64
- name: bot_persona_embd_303.1
dtype: float64
- name: bot_persona_embd_304.1
dtype: float64
- name: bot_persona_embd_305.1
dtype: float64
- name: bot_persona_embd_306.1
dtype: float64
- name: bot_persona_embd_307.1
dtype: float64
- name: bot_persona_embd_308.1
dtype: float64
- name: bot_persona_embd_309.1
dtype: float64
- name: bot_persona_embd_310.1
dtype: float64
- name: bot_persona_embd_311.1
dtype: float64
- name: bot_persona_embd_312.1
dtype: float64
- name: bot_persona_embd_313.1
dtype: float64
- name: bot_persona_embd_314.1
dtype: float64
- name: bot_persona_embd_315.1
dtype: float64
- name: bot_persona_embd_316.1
dtype: float64
- name: bot_persona_embd_317.1
dtype: float64
- name: bot_persona_embd_318.1
dtype: float64
- name: bot_persona_embd_319.1
dtype: float64
- name: bot_persona_embd_320.1
dtype: float64
- name: bot_persona_embd_321.1
dtype: float64
- name: bot_persona_embd_322.1
dtype: float64
- name: bot_persona_embd_323.1
dtype: float64
- name: bot_persona_embd_324.1
dtype: float64
- name: bot_persona_embd_325.1
dtype: float64
- name: bot_persona_embd_326.1
dtype: float64
- name: bot_persona_embd_327.1
dtype: float64
- name: bot_persona_embd_328.1
dtype: float64
- name: bot_persona_embd_329.1
dtype: float64
- name: bot_persona_embd_330.1
dtype: float64
- name: bot_persona_embd_331.1
dtype: float64
- name: bot_persona_embd_332.1
dtype: float64
- name: bot_persona_embd_333.1
dtype: float64
- name: bot_persona_embd_334.1
dtype: float64
- name: bot_persona_embd_335.1
dtype: float64
- name: bot_persona_embd_336.1
dtype: float64
- name: bot_persona_embd_337.1
dtype: float64
- name: bot_persona_embd_338.1
dtype: float64
- name: bot_persona_embd_339.1
dtype: float64
- name: bot_persona_embd_340.1
dtype: float64
- name: bot_persona_embd_341.1
dtype: float64
- name: bot_persona_embd_342.1
dtype: float64
- name: bot_persona_embd_343.1
dtype: float64
- name: bot_persona_embd_344.1
dtype: float64
- name: bot_persona_embd_345.1
dtype: float64
- name: bot_persona_embd_346.1
dtype: float64
- name: bot_persona_embd_347.1
dtype: float64
- name: bot_persona_embd_348.1
dtype: float64
- name: bot_persona_embd_349.1
dtype: float64
- name: bot_persona_embd_350.1
dtype: float64
- name: bot_persona_embd_351.1
dtype: float64
- name: bot_persona_embd_352.1
dtype: float64
- name: bot_persona_embd_353.1
dtype: float64
- name: bot_persona_embd_354.1
dtype: float64
- name: bot_persona_embd_355.1
dtype: float64
- name: bot_persona_embd_356.1
dtype: float64
- name: bot_persona_embd_357.1
dtype: float64
- name: bot_persona_embd_358.1
dtype: float64
- name: bot_persona_embd_359.1
dtype: float64
- name: bot_persona_embd_360.1
dtype: float64
- name: bot_persona_embd_361.1
dtype: float64
- name: bot_persona_embd_362.1
dtype: float64
- name: bot_persona_embd_363.1
dtype: float64
- name: bot_persona_embd_364.1
dtype: float64
- name: bot_persona_embd_365.1
dtype: float64
- name: bot_persona_embd_366.1
dtype: float64
- name: bot_persona_embd_367.1
dtype: float64
- name: bot_persona_embd_368.1
dtype: float64
- name: bot_persona_embd_369.1
dtype: float64
- name: bot_persona_embd_370.1
dtype: float64
- name: bot_persona_embd_371.1
dtype: float64
- name: bot_persona_embd_372.1
dtype: float64
- name: bot_persona_embd_373.1
dtype: float64
- name: bot_persona_embd_374.1
dtype: float64
- name: bot_persona_embd_375.1
dtype: float64
- name: bot_persona_embd_376.1
dtype: float64
- name: bot_persona_embd_377.1
dtype: float64
- name: bot_persona_embd_378.1
dtype: float64
- name: bot_persona_embd_379.1
dtype: float64
- name: bot_persona_embd_380.1
dtype: float64
- name: bot_persona_embd_381.1
dtype: float64
- name: bot_persona_embd_382.1
dtype: float64
- name: bot_persona_embd_383.1
dtype: float64
- name: content_type.1
dtype: int64
- name: user_gender.1
dtype: int64
- name: user_age.1
dtype: float64
- name: bot_gender.1
dtype: int64
- name: year.2
dtype: int64
- name: month.2
dtype: int64
- name: day.2
dtype: int64
- name: hour.2
dtype: int64
- name: hour_sin.2
dtype: float64
- name: hour_cos.2
dtype: float64
- name: weekday.2
dtype: int64
- name: weekday_sin.2
dtype: float64
- name: weekday_cos.2
dtype: float64
- name: minute.2
dtype: int64
- name: second.2
dtype: int64
- name: is_weekend.2
dtype: int64
- name: quarter.2
dtype: int64
- name: user_name_embd_0.2
dtype: float64
- name: user_name_embd_1.2
dtype: float64
- name: user_name_embd_2.2
dtype: float64
- name: user_name_embd_3.2
dtype: float64
- name: user_name_embd_4.2
dtype: float64
- name: user_name_embd_5.2
dtype: float64
- name: user_name_embd_6.2
dtype: float64
- name: user_name_embd_7.2
dtype: float64
- name: user_name_embd_8.2
dtype: float64
- name: user_name_embd_9.2
dtype: float64
- name: user_name_embd_10.2
dtype: float64
- name: user_name_embd_11.2
dtype: float64
- name: user_name_embd_12.2
dtype: float64
- name: user_name_embd_13.2
dtype: float64
- name: user_name_embd_14.2
dtype: float64
- name: user_name_embd_15.2
dtype: float64
- name: user_name_embd_16.2
dtype: float64
- name: user_name_embd_17.2
dtype: float64
- name: user_name_embd_18.2
dtype: float64
- name: user_name_embd_19.2
dtype: float64
- name: user_name_embd_20.2
dtype: float64
- name: user_name_embd_21.2
dtype: float64
- name: user_name_embd_22.2
dtype: float64
- name: user_name_embd_23.2
dtype: float64
- name: user_name_embd_24.2
dtype: float64
- name: user_name_embd_25.2
dtype: float64
- name: user_name_embd_26.2
dtype: float64
- name: user_name_embd_27.2
dtype: float64
- name: user_name_embd_28.2
dtype: float64
- name: user_name_embd_29.2
dtype: float64
- name: user_name_embd_30.2
dtype: float64
- name: user_name_embd_31.2
dtype: float64
- name: user_name_embd_32.2
dtype: float64
- name: user_name_embd_33.2
dtype: float64
- name: user_name_embd_34.2
dtype: float64
- name: user_name_embd_35.2
dtype: float64
- name: user_name_embd_36.2
dtype: float64
- name: user_name_embd_37.2
dtype: float64
- name: user_name_embd_38.2
dtype: float64
- name: user_name_embd_39.2
dtype: float64
- name: user_name_embd_40.2
dtype: float64
- name: user_name_embd_41.2
dtype: float64
- name: user_name_embd_42.2
dtype: float64
- name: user_name_embd_43.2
dtype: float64
- name: user_name_embd_44.2
dtype: float64
- name: user_name_embd_45.2
dtype: float64
- name: user_name_embd_46.2
dtype: float64
- name: user_name_embd_47.2
dtype: float64
- name: user_name_embd_48.2
dtype: float64
- name: user_name_embd_49.2
dtype: float64
- name: user_name_embd_50.2
dtype: float64
- name: user_name_embd_51.2
dtype: float64
- name: user_name_embd_52.2
dtype: float64
- name: user_name_embd_53.2
dtype: float64
- name: user_name_embd_54.2
dtype: float64
- name: user_name_embd_55.2
dtype: float64
- name: user_name_embd_56.2
dtype: float64
- name: user_name_embd_57.2
dtype: float64
- name: user_name_embd_58.2
dtype: float64
- name: user_name_embd_59.2
dtype: float64
- name: user_name_embd_60.2
dtype: float64
- name: user_name_embd_61.2
dtype: float64
- name: user_name_embd_62.2
dtype: float64
- name: user_name_embd_63.2
dtype: float64
- name: user_name_embd_64.2
dtype: float64
- name: user_name_embd_65.2
dtype: float64
- name: user_name_embd_66.2
dtype: float64
- name: user_name_embd_67.2
dtype: float64
- name: user_name_embd_68.2
dtype: float64
- name: user_name_embd_69.2
dtype: float64
- name: user_name_embd_70.2
dtype: float64
- name: user_name_embd_71.2
dtype: float64
- name: user_name_embd_72.2
dtype: float64
- name: user_name_embd_73.2
dtype: float64
- name: user_name_embd_74.2
dtype: float64
- name: user_name_embd_75.2
dtype: float64
- name: user_name_embd_76.2
dtype: float64
- name: user_name_embd_77.2
dtype: float64
- name: user_name_embd_78.2
dtype: float64
- name: user_name_embd_79.2
dtype: float64
- name: user_name_embd_80.2
dtype: float64
- name: user_name_embd_81.2
dtype: float64
- name: user_name_embd_82.2
dtype: float64
- name: user_name_embd_83.2
dtype: float64
- name: user_name_embd_84.2
dtype: float64
- name: user_name_embd_85.2
dtype: float64
- name: user_name_embd_86.2
dtype: float64
- name: user_name_embd_87.2
dtype: float64
- name: user_name_embd_88.2
dtype: float64
- name: user_name_embd_89.2
dtype: float64
- name: user_name_embd_90.2
dtype: float64
- name: user_name_embd_91.2
dtype: float64
- name: user_name_embd_92.2
dtype: float64
- name: user_name_embd_93.2
dtype: float64
- name: user_name_embd_94.2
dtype: float64
- name: user_name_embd_95.2
dtype: float64
- name: user_name_embd_96.2
dtype: float64
- name: user_name_embd_97.2
dtype: float64
- name: user_name_embd_98.2
dtype: float64
- name: user_name_embd_99.2
dtype: float64
- name: user_name_embd_100.2
dtype: float64
- name: user_name_embd_101.2
dtype: float64
- name: user_name_embd_102.2
dtype: float64
- name: user_name_embd_103.2
dtype: float64
- name: user_name_embd_104.2
dtype: float64
- name: user_name_embd_105.2
dtype: float64
- name: user_name_embd_106.2
dtype: float64
- name: user_name_embd_107.2
dtype: float64
- name: user_name_embd_108.2
dtype: float64
- name: user_name_embd_109.2
dtype: float64
- name: user_name_embd_110.2
dtype: float64
- name: user_name_embd_111.2
dtype: float64
- name: user_name_embd_112.2
dtype: float64
- name: user_name_embd_113.2
dtype: float64
- name: user_name_embd_114.2
dtype: float64
- name: user_name_embd_115.2
dtype: float64
- name: user_name_embd_116.2
dtype: float64
- name: user_name_embd_117.2
dtype: float64
- name: user_name_embd_118.2
dtype: float64
- name: user_name_embd_119.2
dtype: float64
- name: user_name_embd_120.2
dtype: float64
- name: user_name_embd_121.2
dtype: float64
- name: user_name_embd_122.2
dtype: float64
- name: user_name_embd_123.2
dtype: float64
- name: user_name_embd_124.2
dtype: float64
- name: user_name_embd_125.2
dtype: float64
- name: user_name_embd_126.2
dtype: float64
- name: user_name_embd_127.2
dtype: float64
- name: user_name_embd_128.2
dtype: float64
- name: user_name_embd_129.2
dtype: float64
- name: user_name_embd_130.2
dtype: float64
- name: user_name_embd_131.2
dtype: float64
- name: user_name_embd_132.2
dtype: float64
- name: user_name_embd_133.2
dtype: float64
- name: user_name_embd_134.2
dtype: float64
- name: user_name_embd_135.2
dtype: float64
- name: user_name_embd_136.2
dtype: float64
- name: user_name_embd_137.2
dtype: float64
- name: user_name_embd_138.2
dtype: float64
- name: user_name_embd_139.2
dtype: float64
- name: user_name_embd_140.2
dtype: float64
- name: user_name_embd_141.2
dtype: float64
- name: user_name_embd_142.2
dtype: float64
- name: user_name_embd_143.2
dtype: float64
- name: user_name_embd_144.2
dtype: float64
- name: user_name_embd_145.2
dtype: float64
- name: user_name_embd_146.2
dtype: float64
- name: user_name_embd_147.2
dtype: float64
- name: user_name_embd_148.2
dtype: float64
- name: user_name_embd_149.2
dtype: float64
- name: user_name_embd_150.2
dtype: float64
- name: user_name_embd_151.2
dtype: float64
- name: user_name_embd_152.2
dtype: float64
- name: user_name_embd_153.2
dtype: float64
- name: user_name_embd_154.2
dtype: float64
- name: user_name_embd_155.2
dtype: float64
- name: user_name_embd_156.2
dtype: float64
- name: user_name_embd_157.2
dtype: float64
- name: user_name_embd_158.2
dtype: float64
- name: user_name_embd_159.2
dtype: float64
- name: user_name_embd_160.2
dtype: float64
- name: user_name_embd_161.2
dtype: float64
- name: user_name_embd_162.2
dtype: float64
- name: user_name_embd_163.2
dtype: float64
- name: user_name_embd_164.2
dtype: float64
- name: user_name_embd_165.2
dtype: float64
- name: user_name_embd_166.2
dtype: float64
- name: user_name_embd_167.2
dtype: float64
- name: user_name_embd_168.2
dtype: float64
- name: user_name_embd_169.2
dtype: float64
- name: user_name_embd_170.2
dtype: float64
- name: user_name_embd_171.2
dtype: float64
- name: user_name_embd_172.2
dtype: float64
- name: user_name_embd_173.2
dtype: float64
- name: user_name_embd_174.2
dtype: float64
- name: user_name_embd_175.2
dtype: float64
- name: user_name_embd_176.2
dtype: float64
- name: user_name_embd_177.2
dtype: float64
- name: user_name_embd_178.2
dtype: float64
- name: user_name_embd_179.2
dtype: float64
- name: user_name_embd_180.2
dtype: float64
- name: user_name_embd_181.2
dtype: float64
- name: user_name_embd_182.2
dtype: float64
- name: user_name_embd_183.2
dtype: float64
- name: user_name_embd_184.2
dtype: float64
- name: user_name_embd_185.2
dtype: float64
- name: user_name_embd_186.2
dtype: float64
- name: user_name_embd_187.2
dtype: float64
- name: user_name_embd_188.2
dtype: float64
- name: user_name_embd_189.2
dtype: float64
- name: user_name_embd_190.2
dtype: float64
- name: user_name_embd_191.2
dtype: float64
- name: user_name_embd_192.2
dtype: float64
- name: user_name_embd_193.2
dtype: float64
- name: user_name_embd_194.2
dtype: float64
- name: user_name_embd_195.2
dtype: float64
- name: user_name_embd_196.2
dtype: float64
- name: user_name_embd_197.2
dtype: float64
- name: user_name_embd_198.2
dtype: float64
- name: user_name_embd_199.2
dtype: float64
- name: user_name_embd_200.2
dtype: float64
- name: user_name_embd_201.2
dtype: float64
- name: user_name_embd_202.2
dtype: float64
- name: user_name_embd_203.2
dtype: float64
- name: user_name_embd_204.2
dtype: float64
- name: user_name_embd_205.2
dtype: float64
- name: user_name_embd_206.2
dtype: float64
- name: user_name_embd_207.2
dtype: float64
- name: user_name_embd_208.2
dtype: float64
- name: user_name_embd_209.2
dtype: float64
- name: user_name_embd_210.2
dtype: float64
- name: user_name_embd_211.2
dtype: float64
- name: user_name_embd_212.2
dtype: float64
- name: user_name_embd_213.2
dtype: float64
- name: user_name_embd_214.2
dtype: float64
- name: user_name_embd_215.2
dtype: float64
- name: user_name_embd_216.2
dtype: float64
- name: user_name_embd_217.2
dtype: float64
- name: user_name_embd_218.2
dtype: float64
- name: user_name_embd_219.2
dtype: float64
- name: user_name_embd_220.2
dtype: float64
- name: user_name_embd_221.2
dtype: float64
- name: user_name_embd_222.2
dtype: float64
- name: user_name_embd_223.2
dtype: float64
- name: user_name_embd_224.2
dtype: float64
- name: user_name_embd_225.2
dtype: float64
- name: user_name_embd_226.2
dtype: float64
- name: user_name_embd_227.2
dtype: float64
- name: user_name_embd_228.2
dtype: float64
- name: user_name_embd_229.2
dtype: float64
- name: user_name_embd_230.2
dtype: float64
- name: user_name_embd_231.2
dtype: float64
- name: user_name_embd_232.2
dtype: float64
- name: user_name_embd_233.2
dtype: float64
- name: user_name_embd_234.2
dtype: float64
- name: user_name_embd_235.2
dtype: float64
- name: user_name_embd_236.2
dtype: float64
- name: user_name_embd_237.2
dtype: float64
- name: user_name_embd_238.2
dtype: float64
- name: user_name_embd_239.2
dtype: float64
- name: user_name_embd_240.2
dtype: float64
- name: user_name_embd_241.2
dtype: float64
- name: user_name_embd_242.2
dtype: float64
- name: user_name_embd_243.2
dtype: float64
- name: user_name_embd_244.2
dtype: float64
- name: user_name_embd_245.2
dtype: float64
- name: user_name_embd_246.2
dtype: float64
- name: user_name_embd_247.2
dtype: float64
- name: user_name_embd_248.2
dtype: float64
- name: user_name_embd_249.2
dtype: float64
- name: user_name_embd_250.2
dtype: float64
- name: user_name_embd_251.2
dtype: float64
- name: user_name_embd_252.2
dtype: float64
- name: user_name_embd_253.2
dtype: float64
- name: user_name_embd_254.2
dtype: float64
- name: user_name_embd_255.2
dtype: float64
- name: user_name_embd_256.2
dtype: float64
- name: user_name_embd_257.2
dtype: float64
- name: user_name_embd_258.2
dtype: float64
- name: user_name_embd_259.2
dtype: float64
- name: user_name_embd_260.2
dtype: float64
- name: user_name_embd_261.2
dtype: float64
- name: user_name_embd_262.2
dtype: float64
- name: user_name_embd_263.2
dtype: float64
- name: user_name_embd_264.2
dtype: float64
- name: user_name_embd_265.2
dtype: float64
- name: user_name_embd_266.2
dtype: float64
- name: user_name_embd_267.2
dtype: float64
- name: user_name_embd_268.2
dtype: float64
- name: user_name_embd_269.2
dtype: float64
- name: user_name_embd_270.2
dtype: float64
- name: user_name_embd_271.2
dtype: float64
- name: user_name_embd_272.2
dtype: float64
- name: user_name_embd_273.2
dtype: float64
- name: user_name_embd_274.2
dtype: float64
- name: user_name_embd_275.2
dtype: float64
- name: user_name_embd_276.2
dtype: float64
- name: user_name_embd_277.2
dtype: float64
- name: user_name_embd_278.2
dtype: float64
- name: user_name_embd_279.2
dtype: float64
- name: user_name_embd_280.2
dtype: float64
- name: user_name_embd_281.2
dtype: float64
- name: user_name_embd_282.2
dtype: float64
- name: user_name_embd_283.2
dtype: float64
- name: user_name_embd_284.2
dtype: float64
- name: user_name_embd_285.2
dtype: float64
- name: user_name_embd_286.2
dtype: float64
- name: user_name_embd_287.2
dtype: float64
- name: user_name_embd_288.2
dtype: float64
- name: user_name_embd_289.2
dtype: float64
- name: user_name_embd_290.2
dtype: float64
- name: user_name_embd_291.2
dtype: float64
- name: user_name_embd_292.2
dtype: float64
- name: user_name_embd_293.2
dtype: float64
- name: user_name_embd_294.2
dtype: float64
- name: user_name_embd_295.2
dtype: float64
- name: user_name_embd_296.2
dtype: float64
- name: user_name_embd_297.2
dtype: float64
- name: user_name_embd_298.2
dtype: float64
- name: user_name_embd_299.2
dtype: float64
- name: user_name_embd_300.2
dtype: float64
- name: user_name_embd_301.2
dtype: float64
- name: user_name_embd_302.2
dtype: float64
- name: user_name_embd_303.2
dtype: float64
- name: user_name_embd_304.2
dtype: float64
- name: user_name_embd_305.2
dtype: float64
- name: user_name_embd_306.2
dtype: float64
- name: user_name_embd_307.2
dtype: float64
- name: user_name_embd_308.2
dtype: float64
- name: user_name_embd_309.2
dtype: float64
- name: user_name_embd_310.2
dtype: float64
- name: user_name_embd_311.2
dtype: float64
- name: user_name_embd_312.2
dtype: float64
- name: user_name_embd_313.2
dtype: float64
- name: user_name_embd_314.2
dtype: float64
- name: user_name_embd_315.2
dtype: float64
- name: user_name_embd_316.2
dtype: float64
- name: user_name_embd_317.2
dtype: float64
- name: user_name_embd_318.2
dtype: float64
- name: user_name_embd_319.2
dtype: float64
- name: user_name_embd_320.2
dtype: float64
- name: user_name_embd_321.2
dtype: float64
- name: user_name_embd_322.2
dtype: float64
- name: user_name_embd_323.2
dtype: float64
- name: user_name_embd_324.2
dtype: float64
- name: user_name_embd_325.2
dtype: float64
- name: user_name_embd_326.2
dtype: float64
- name: user_name_embd_327.2
dtype: float64
- name: user_name_embd_328.2
dtype: float64
- name: user_name_embd_329.2
dtype: float64
- name: user_name_embd_330.2
dtype: float64
- name: user_name_embd_331.2
dtype: float64
- name: user_name_embd_332.2
dtype: float64
- name: user_name_embd_333.2
dtype: float64
- name: user_name_embd_334.2
dtype: float64
- name: user_name_embd_335.2
dtype: float64
- name: user_name_embd_336.2
dtype: float64
- name: user_name_embd_337.2
dtype: float64
- name: user_name_embd_338.2
dtype: float64
- name: user_name_embd_339.2
dtype: float64
- name: user_name_embd_340.2
dtype: float64
- name: user_name_embd_341.2
dtype: float64
- name: user_name_embd_342.2
dtype: float64
- name: user_name_embd_343.2
dtype: float64
- name: user_name_embd_344.2
dtype: float64
- name: user_name_embd_345.2
dtype: float64
- name: user_name_embd_346.2
dtype: float64
- name: user_name_embd_347.2
dtype: float64
- name: user_name_embd_348.2
dtype: float64
- name: user_name_embd_349.2
dtype: float64
- name: user_name_embd_350.2
dtype: float64
- name: user_name_embd_351.2
dtype: float64
- name: user_name_embd_352.2
dtype: float64
- name: user_name_embd_353.2
dtype: float64
- name: user_name_embd_354.2
dtype: float64
- name: user_name_embd_355.2
dtype: float64
- name: user_name_embd_356.2
dtype: float64
- name: user_name_embd_357.2
dtype: float64
- name: user_name_embd_358.2
dtype: float64
- name: user_name_embd_359.2
dtype: float64
- name: user_name_embd_360.2
dtype: float64
- name: user_name_embd_361.2
dtype: float64
- name: user_name_embd_362.2
dtype: float64
- name: user_name_embd_363.2
dtype: float64
- name: user_name_embd_364.2
dtype: float64
- name: user_name_embd_365.2
dtype: float64
- name: user_name_embd_366.2
dtype: float64
- name: user_name_embd_367.2
dtype: float64
- name: user_name_embd_368.2
dtype: float64
- name: user_name_embd_369.2
dtype: float64
- name: user_name_embd_370.2
dtype: float64
- name: user_name_embd_371.2
dtype: float64
- name: user_name_embd_372.2
dtype: float64
- name: user_name_embd_373.2
dtype: float64
- name: user_name_embd_374.2
dtype: float64
- name: user_name_embd_375.2
dtype: float64
- name: user_name_embd_376.2
dtype: float64
- name: user_name_embd_377.2
dtype: float64
- name: user_name_embd_378.2
dtype: float64
- name: user_name_embd_379.2
dtype: float64
- name: user_name_embd_380.2
dtype: float64
- name: user_name_embd_381.2
dtype: float64
- name: user_name_embd_382.2
dtype: float64
- name: user_name_embd_383.2
dtype: float64
- name: bot_name_embd_0.2
dtype: float64
- name: bot_name_embd_1.2
dtype: float64
- name: bot_name_embd_2.2
dtype: float64
- name: bot_name_embd_3.2
dtype: float64
- name: bot_name_embd_4.2
dtype: float64
- name: bot_name_embd_5.2
dtype: float64
- name: bot_name_embd_6.2
dtype: float64
- name: bot_name_embd_7.2
dtype: float64
- name: bot_name_embd_8.2
dtype: float64
- name: bot_name_embd_9.2
dtype: float64
- name: bot_name_embd_10.2
dtype: float64
- name: bot_name_embd_11.2
dtype: float64
- name: bot_name_embd_12.2
dtype: float64
- name: bot_name_embd_13.2
dtype: float64
- name: bot_name_embd_14.2
dtype: float64
- name: bot_name_embd_15.2
dtype: float64
- name: bot_name_embd_16.2
dtype: float64
- name: bot_name_embd_17.2
dtype: float64
- name: bot_name_embd_18.2
dtype: float64
- name: bot_name_embd_19.2
dtype: float64
- name: bot_name_embd_20.2
dtype: float64
- name: bot_name_embd_21.2
dtype: float64
- name: bot_name_embd_22.2
dtype: float64
- name: bot_name_embd_23.2
dtype: float64
- name: bot_name_embd_24.2
dtype: float64
- name: bot_name_embd_25.2
dtype: float64
- name: bot_name_embd_26.2
dtype: float64
- name: bot_name_embd_27.2
dtype: float64
- name: bot_name_embd_28.2
dtype: float64
- name: bot_name_embd_29.2
dtype: float64
- name: bot_name_embd_30.2
dtype: float64
- name: bot_name_embd_31.2
dtype: float64
- name: bot_name_embd_32.2
dtype: float64
- name: bot_name_embd_33.2
dtype: float64
- name: bot_name_embd_34.2
dtype: float64
- name: bot_name_embd_35.2
dtype: float64
- name: bot_name_embd_36.2
dtype: float64
- name: bot_name_embd_37.2
dtype: float64
- name: bot_name_embd_38.2
dtype: float64
- name: bot_name_embd_39.2
dtype: float64
- name: bot_name_embd_40.2
dtype: float64
- name: bot_name_embd_41.2
dtype: float64
- name: bot_name_embd_42.2
dtype: float64
- name: bot_name_embd_43.2
dtype: float64
- name: bot_name_embd_44.2
dtype: float64
- name: bot_name_embd_45.2
dtype: float64
- name: bot_name_embd_46.2
dtype: float64
- name: bot_name_embd_47.2
dtype: float64
- name: bot_name_embd_48.2
dtype: float64
- name: bot_name_embd_49.2
dtype: float64
- name: bot_name_embd_50.2
dtype: float64
- name: bot_name_embd_51.2
dtype: float64
- name: bot_name_embd_52.2
dtype: float64
- name: bot_name_embd_53.2
dtype: float64
- name: bot_name_embd_54.2
dtype: float64
- name: bot_name_embd_55.2
dtype: float64
- name: bot_name_embd_56.2
dtype: float64
- name: bot_name_embd_57.2
dtype: float64
- name: bot_name_embd_58.2
dtype: float64
- name: bot_name_embd_59.2
dtype: float64
- name: bot_name_embd_60.2
dtype: float64
- name: bot_name_embd_61.2
dtype: float64
- name: bot_name_embd_62.2
dtype: float64
- name: bot_name_embd_63.2
dtype: float64
- name: bot_name_embd_64.2
dtype: float64
- name: bot_name_embd_65.2
dtype: float64
- name: bot_name_embd_66.2
dtype: float64
- name: bot_name_embd_67.2
dtype: float64
- name: bot_name_embd_68.2
dtype: float64
- name: bot_name_embd_69.2
dtype: float64
- name: bot_name_embd_70.2
dtype: float64
- name: bot_name_embd_71.2
dtype: float64
- name: bot_name_embd_72.2
dtype: float64
- name: bot_name_embd_73.2
dtype: float64
- name: bot_name_embd_74.2
dtype: float64
- name: bot_name_embd_75.2
dtype: float64
- name: bot_name_embd_76.2
dtype: float64
- name: bot_name_embd_77.2
dtype: float64
- name: bot_name_embd_78.2
dtype: float64
- name: bot_name_embd_79.2
dtype: float64
- name: bot_name_embd_80.2
dtype: float64
- name: bot_name_embd_81.2
dtype: float64
- name: bot_name_embd_82.2
dtype: float64
- name: bot_name_embd_83.2
dtype: float64
- name: bot_name_embd_84.2
dtype: float64
- name: bot_name_embd_85.2
dtype: float64
- name: bot_name_embd_86.2
dtype: float64
- name: bot_name_embd_87.2
dtype: float64
- name: bot_name_embd_88.2
dtype: float64
- name: bot_name_embd_89.2
dtype: float64
- name: bot_name_embd_90.2
dtype: float64
- name: bot_name_embd_91.2
dtype: float64
- name: bot_name_embd_92.2
dtype: float64
- name: bot_name_embd_93.2
dtype: float64
- name: bot_name_embd_94.2
dtype: float64
- name: bot_name_embd_95.2
dtype: float64
- name: bot_name_embd_96.2
dtype: float64
- name: bot_name_embd_97.2
dtype: float64
- name: bot_name_embd_98.2
dtype: float64
- name: bot_name_embd_99.2
dtype: float64
- name: bot_name_embd_100.2
dtype: float64
- name: bot_name_embd_101.2
dtype: float64
- name: bot_name_embd_102.2
dtype: float64
- name: bot_name_embd_103.2
dtype: float64
- name: bot_name_embd_104.2
dtype: float64
- name: bot_name_embd_105.2
dtype: float64
- name: bot_name_embd_106.2
dtype: float64
- name: bot_name_embd_107.2
dtype: float64
- name: bot_name_embd_108.2
dtype: float64
- name: bot_name_embd_109.2
dtype: float64
- name: bot_name_embd_110.2
dtype: float64
- name: bot_name_embd_111.2
dtype: float64
- name: bot_name_embd_112.2
dtype: float64
- name: bot_name_embd_113.2
dtype: float64
- name: bot_name_embd_114.2
dtype: float64
- name: bot_name_embd_115.2
dtype: float64
- name: bot_name_embd_116.2
dtype: float64
- name: bot_name_embd_117.2
dtype: float64
- name: bot_name_embd_118.2
dtype: float64
- name: bot_name_embd_119.2
dtype: float64
- name: bot_name_embd_120.2
dtype: float64
- name: bot_name_embd_121.2
dtype: float64
- name: bot_name_embd_122.2
dtype: float64
- name: bot_name_embd_123.2
dtype: float64
- name: bot_name_embd_124.2
dtype: float64
- name: bot_name_embd_125.2
dtype: float64
- name: bot_name_embd_126.2
dtype: float64
- name: bot_name_embd_127.2
dtype: float64
- name: bot_name_embd_128.2
dtype: float64
- name: bot_name_embd_129.2
dtype: float64
- name: bot_name_embd_130.2
dtype: float64
- name: bot_name_embd_131.2
dtype: float64
- name: bot_name_embd_132.2
dtype: float64
- name: bot_name_embd_133.2
dtype: float64
- name: bot_name_embd_134.2
dtype: float64
- name: bot_name_embd_135.2
dtype: float64
- name: bot_name_embd_136.2
dtype: float64
- name: bot_name_embd_137.2
dtype: float64
- name: bot_name_embd_138.2
dtype: float64
- name: bot_name_embd_139.2
dtype: float64
- name: bot_name_embd_140.2
dtype: float64
- name: bot_name_embd_141.2
dtype: float64
- name: bot_name_embd_142.2
dtype: float64
- name: bot_name_embd_143.2
dtype: float64
- name: bot_name_embd_144.2
dtype: float64
- name: bot_name_embd_145.2
dtype: float64
- name: bot_name_embd_146.2
dtype: float64
- name: bot_name_embd_147.2
dtype: float64
- name: bot_name_embd_148.2
dtype: float64
- name: bot_name_embd_149.2
dtype: float64
- name: bot_name_embd_150.2
dtype: float64
- name: bot_name_embd_151.2
dtype: float64
- name: bot_name_embd_152.2
dtype: float64
- name: bot_name_embd_153.2
dtype: float64
- name: bot_name_embd_154.2
dtype: float64
- name: bot_name_embd_155.2
dtype: float64
- name: bot_name_embd_156.2
dtype: float64
- name: bot_name_embd_157.2
dtype: float64
- name: bot_name_embd_158.2
dtype: float64
- name: bot_name_embd_159.2
dtype: float64
- name: bot_name_embd_160.2
dtype: float64
- name: bot_name_embd_161.2
dtype: float64
- name: bot_name_embd_162.2
dtype: float64
- name: bot_name_embd_163.2
dtype: float64
- name: bot_name_embd_164.2
dtype: float64
- name: bot_name_embd_165.2
dtype: float64
- name: bot_name_embd_166.2
dtype: float64
- name: bot_name_embd_167.2
dtype: float64
- name: bot_name_embd_168.2
dtype: float64
- name: bot_name_embd_169.2
dtype: float64
- name: bot_name_embd_170.2
dtype: float64
- name: bot_name_embd_171.2
dtype: float64
- name: bot_name_embd_172.2
dtype: float64
- name: bot_name_embd_173.2
dtype: float64
- name: bot_name_embd_174.2
dtype: float64
- name: bot_name_embd_175.2
dtype: float64
- name: bot_name_embd_176.2
dtype: float64
- name: bot_name_embd_177.2
dtype: float64
- name: bot_name_embd_178.2
dtype: float64
- name: bot_name_embd_179.2
dtype: float64
- name: bot_name_embd_180.2
dtype: float64
- name: bot_name_embd_181.2
dtype: float64
- name: bot_name_embd_182.2
dtype: float64
- name: bot_name_embd_183.2
dtype: float64
- name: bot_name_embd_184.2
dtype: float64
- name: bot_name_embd_185.2
dtype: float64
- name: bot_name_embd_186.2
dtype: float64
- name: bot_name_embd_187.2
dtype: float64
- name: bot_name_embd_188.2
dtype: float64
- name: bot_name_embd_189.2
dtype: float64
- name: bot_name_embd_190.2
dtype: float64
- name: bot_name_embd_191.2
dtype: float64
- name: bot_name_embd_192.2
dtype: float64
- name: bot_name_embd_193.2
dtype: float64
- name: bot_name_embd_194.2
dtype: float64
- name: bot_name_embd_195.2
dtype: float64
- name: bot_name_embd_196.2
dtype: float64
- name: bot_name_embd_197.2
dtype: float64
- name: bot_name_embd_198.2
dtype: float64
- name: bot_name_embd_199.2
dtype: float64
- name: bot_name_embd_200.2
dtype: float64
- name: bot_name_embd_201.2
dtype: float64
- name: bot_name_embd_202.2
dtype: float64
- name: bot_name_embd_203.2
dtype: float64
- name: bot_name_embd_204.2
dtype: float64
- name: bot_name_embd_205.2
dtype: float64
- name: bot_name_embd_206.2
dtype: float64
- name: bot_name_embd_207.2
dtype: float64
- name: bot_name_embd_208.2
dtype: float64
- name: bot_name_embd_209.2
dtype: float64
- name: bot_name_embd_210.2
dtype: float64
- name: bot_name_embd_211.2
dtype: float64
- name: bot_name_embd_212.2
dtype: float64
- name: bot_name_embd_213.2
dtype: float64
- name: bot_name_embd_214.2
dtype: float64
- name: bot_name_embd_215.2
dtype: float64
- name: bot_name_embd_216.2
dtype: float64
- name: bot_name_embd_217.2
dtype: float64
- name: bot_name_embd_218.2
dtype: float64
- name: bot_name_embd_219.2
dtype: float64
- name: bot_name_embd_220.2
dtype: float64
- name: bot_name_embd_221.2
dtype: float64
- name: bot_name_embd_222.2
dtype: float64
- name: bot_name_embd_223.2
dtype: float64
- name: bot_name_embd_224.2
dtype: float64
- name: bot_name_embd_225.2
dtype: float64
- name: bot_name_embd_226.2
dtype: float64
- name: bot_name_embd_227.2
dtype: float64
- name: bot_name_embd_228.2
dtype: float64
- name: bot_name_embd_229.2
dtype: float64
- name: bot_name_embd_230.2
dtype: float64
- name: bot_name_embd_231.2
dtype: float64
- name: bot_name_embd_232.2
dtype: float64
- name: bot_name_embd_233.2
dtype: float64
- name: bot_name_embd_234.2
dtype: float64
- name: bot_name_embd_235.2
dtype: float64
- name: bot_name_embd_236.2
dtype: float64
- name: bot_name_embd_237.2
dtype: float64
- name: bot_name_embd_238.2
dtype: float64
- name: bot_name_embd_239.2
dtype: float64
- name: bot_name_embd_240.2
dtype: float64
- name: bot_name_embd_241.2
dtype: float64
- name: bot_name_embd_242.2
dtype: float64
- name: bot_name_embd_243.2
dtype: float64
- name: bot_name_embd_244.2
dtype: float64
- name: bot_name_embd_245.2
dtype: float64
- name: bot_name_embd_246.2
dtype: float64
- name: bot_name_embd_247.2
dtype: float64
- name: bot_name_embd_248.2
dtype: float64
- name: bot_name_embd_249.2
dtype: float64
- name: bot_name_embd_250.2
dtype: float64
- name: bot_name_embd_251.2
dtype: float64
- name: bot_name_embd_252.2
dtype: float64
- name: bot_name_embd_253.2
dtype: float64
- name: bot_name_embd_254.2
dtype: float64
- name: bot_name_embd_255.2
dtype: float64
- name: bot_name_embd_256.2
dtype: float64
- name: bot_name_embd_257.2
dtype: float64
- name: bot_name_embd_258.2
dtype: float64
- name: bot_name_embd_259.2
dtype: float64
- name: bot_name_embd_260.2
dtype: float64
- name: bot_name_embd_261.2
dtype: float64
- name: bot_name_embd_262.2
dtype: float64
- name: bot_name_embd_263.2
dtype: float64
- name: bot_name_embd_264.2
dtype: float64
- name: bot_name_embd_265.2
dtype: float64
- name: bot_name_embd_266.2
dtype: float64
- name: bot_name_embd_267.2
dtype: float64
- name: bot_name_embd_268.2
dtype: float64
- name: bot_name_embd_269.2
dtype: float64
- name: bot_name_embd_270.2
dtype: float64
- name: bot_name_embd_271.2
dtype: float64
- name: bot_name_embd_272.2
dtype: float64
- name: bot_name_embd_273.2
dtype: float64
- name: bot_name_embd_274.2
dtype: float64
- name: bot_name_embd_275.2
dtype: float64
- name: bot_name_embd_276.2
dtype: float64
- name: bot_name_embd_277.2
dtype: float64
- name: bot_name_embd_278.2
dtype: float64
- name: bot_name_embd_279.2
dtype: float64
- name: bot_name_embd_280.2
dtype: float64
- name: bot_name_embd_281.2
dtype: float64
- name: bot_name_embd_282.2
dtype: float64
- name: bot_name_embd_283.2
dtype: float64
- name: bot_name_embd_284.2
dtype: float64
- name: bot_name_embd_285.2
dtype: float64
- name: bot_name_embd_286.2
dtype: float64
- name: bot_name_embd_287.2
dtype: float64
- name: bot_name_embd_288.2
dtype: float64
- name: bot_name_embd_289.2
dtype: float64
- name: bot_name_embd_290.2
dtype: float64
- name: bot_name_embd_291.2
dtype: float64
- name: bot_name_embd_292.2
dtype: float64
- name: bot_name_embd_293.2
dtype: float64
- name: bot_name_embd_294.2
dtype: float64
- name: bot_name_embd_295.2
dtype: float64
- name: bot_name_embd_296.2
dtype: float64
- name: bot_name_embd_297.2
dtype: float64
- name: bot_name_embd_298.2
dtype: float64
- name: bot_name_embd_299.2
dtype: float64
- name: bot_name_embd_300.2
dtype: float64
- name: bot_name_embd_301.2
dtype: float64
- name: bot_name_embd_302.2
dtype: float64
- name: bot_name_embd_303.2
dtype: float64
- name: bot_name_embd_304.2
dtype: float64
- name: bot_name_embd_305.2
dtype: float64
- name: bot_name_embd_306.2
dtype: float64
- name: bot_name_embd_307.2
dtype: float64
- name: bot_name_embd_308.2
dtype: float64
- name: bot_name_embd_309.2
dtype: float64
- name: bot_name_embd_310.2
dtype: float64
- name: bot_name_embd_311.2
dtype: float64
- name: bot_name_embd_312.2
dtype: float64
- name: bot_name_embd_313.2
dtype: float64
- name: bot_name_embd_314.2
dtype: float64
- name: bot_name_embd_315.2
dtype: float64
- name: bot_name_embd_316.2
dtype: float64
- name: bot_name_embd_317.2
dtype: float64
- name: bot_name_embd_318.2
dtype: float64
- name: bot_name_embd_319.2
dtype: float64
- name: bot_name_embd_320.2
dtype: float64
- name: bot_name_embd_321.2
dtype: float64
- name: bot_name_embd_322.2
dtype: float64
- name: bot_name_embd_323.2
dtype: float64
- name: bot_name_embd_324.2
dtype: float64
- name: bot_name_embd_325.2
dtype: float64
- name: bot_name_embd_326.2
dtype: float64
- name: bot_name_embd_327.2
dtype: float64
- name: bot_name_embd_328.2
dtype: float64
- name: bot_name_embd_329.2
dtype: float64
- name: bot_name_embd_330.2
dtype: float64
- name: bot_name_embd_331.2
dtype: float64
- name: bot_name_embd_332.2
dtype: float64
- name: bot_name_embd_333.2
dtype: float64
- name: bot_name_embd_334.2
dtype: float64
- name: bot_name_embd_335.2
dtype: float64
- name: bot_name_embd_336.2
dtype: float64
- name: bot_name_embd_337.2
dtype: float64
- name: bot_name_embd_338.2
dtype: float64
- name: bot_name_embd_339.2
dtype: float64
- name: bot_name_embd_340.2
dtype: float64
- name: bot_name_embd_341.2
dtype: float64
- name: bot_name_embd_342.2
dtype: float64
- name: bot_name_embd_343.2
dtype: float64
- name: bot_name_embd_344.2
dtype: float64
- name: bot_name_embd_345.2
dtype: float64
- name: bot_name_embd_346.2
dtype: float64
- name: bot_name_embd_347.2
dtype: float64
- name: bot_name_embd_348.2
dtype: float64
- name: bot_name_embd_349.2
dtype: float64
- name: bot_name_embd_350.2
dtype: float64
- name: bot_name_embd_351.2
dtype: float64
- name: bot_name_embd_352.2
dtype: float64
- name: bot_name_embd_353.2
dtype: float64
- name: bot_name_embd_354.2
dtype: float64
- name: bot_name_embd_355.2
dtype: float64
- name: bot_name_embd_356.2
dtype: float64
- name: bot_name_embd_357.2
dtype: float64
- name: bot_name_embd_358.2
dtype: float64
- name: bot_name_embd_359.2
dtype: float64
- name: bot_name_embd_360.2
dtype: float64
- name: bot_name_embd_361.2
dtype: float64
- name: bot_name_embd_362.2
dtype: float64
- name: bot_name_embd_363.2
dtype: float64
- name: bot_name_embd_364.2
dtype: float64
- name: bot_name_embd_365.2
dtype: float64
- name: bot_name_embd_366.2
dtype: float64
- name: bot_name_embd_367.2
dtype: float64
- name: bot_name_embd_368.2
dtype: float64
- name: bot_name_embd_369.2
dtype: float64
- name: bot_name_embd_370.2
dtype: float64
- name: bot_name_embd_371.2
dtype: float64
- name: bot_name_embd_372.2
dtype: float64
- name: bot_name_embd_373.2
dtype: float64
- name: bot_name_embd_374.2
dtype: float64
- name: bot_name_embd_375.2
dtype: float64
- name: bot_name_embd_376.2
dtype: float64
- name: bot_name_embd_377.2
dtype: float64
- name: bot_name_embd_378.2
dtype: float64
- name: bot_name_embd_379.2
dtype: float64
- name: bot_name_embd_380.2
dtype: float64
- name: bot_name_embd_381.2
dtype: float64
- name: bot_name_embd_382.2
dtype: float64
- name: bot_name_embd_383.2
dtype: float64
- name: bot_persona_embd_0.2
dtype: float64
- name: bot_persona_embd_1.2
dtype: float64
- name: bot_persona_embd_2.2
dtype: float64
- name: bot_persona_embd_3.2
dtype: float64
- name: bot_persona_embd_4.2
dtype: float64
- name: bot_persona_embd_5.2
dtype: float64
- name: bot_persona_embd_6.2
dtype: float64
- name: bot_persona_embd_7.2
dtype: float64
- name: bot_persona_embd_8.2
dtype: float64
- name: bot_persona_embd_9.2
dtype: float64
- name: bot_persona_embd_10.2
dtype: float64
- name: bot_persona_embd_11.2
dtype: float64
- name: bot_persona_embd_12.2
dtype: float64
- name: bot_persona_embd_13.2
dtype: float64
- name: bot_persona_embd_14.2
dtype: float64
- name: bot_persona_embd_15.2
dtype: float64
- name: bot_persona_embd_16.2
dtype: float64
- name: bot_persona_embd_17.2
dtype: float64
- name: bot_persona_embd_18.2
dtype: float64
- name: bot_persona_embd_19.2
dtype: float64
- name: bot_persona_embd_20.2
dtype: float64
- name: bot_persona_embd_21.2
dtype: float64
- name: bot_persona_embd_22.2
dtype: float64
- name: bot_persona_embd_23.2
dtype: float64
- name: bot_persona_embd_24.2
dtype: float64
- name: bot_persona_embd_25.2
dtype: float64
- name: bot_persona_embd_26.2
dtype: float64
- name: bot_persona_embd_27.2
dtype: float64
- name: bot_persona_embd_28.2
dtype: float64
- name: bot_persona_embd_29.2
dtype: float64
- name: bot_persona_embd_30.2
dtype: float64
- name: bot_persona_embd_31.2
dtype: float64
- name: bot_persona_embd_32.2
dtype: float64
- name: bot_persona_embd_33.2
dtype: float64
- name: bot_persona_embd_34.2
dtype: float64
- name: bot_persona_embd_35.2
dtype: float64
- name: bot_persona_embd_36.2
dtype: float64
- name: bot_persona_embd_37.2
dtype: float64
- name: bot_persona_embd_38.2
dtype: float64
- name: bot_persona_embd_39.2
dtype: float64
- name: bot_persona_embd_40.2
dtype: float64
- name: bot_persona_embd_41.2
dtype: float64
- name: bot_persona_embd_42.2
dtype: float64
- name: bot_persona_embd_43.2
dtype: float64
- name: bot_persona_embd_44.2
dtype: float64
- name: bot_persona_embd_45.2
dtype: float64
- name: bot_persona_embd_46.2
dtype: float64
- name: bot_persona_embd_47.2
dtype: float64
- name: bot_persona_embd_48.2
dtype: float64
- name: bot_persona_embd_49.2
dtype: float64
- name: bot_persona_embd_50.2
dtype: float64
- name: bot_persona_embd_51.2
dtype: float64
- name: bot_persona_embd_52.2
dtype: float64
- name: bot_persona_embd_53.2
dtype: float64
- name: bot_persona_embd_54.2
dtype: float64
- name: bot_persona_embd_55.2
dtype: float64
- name: bot_persona_embd_56.2
dtype: float64
- name: bot_persona_embd_57.2
dtype: float64
- name: bot_persona_embd_58.2
dtype: float64
- name: bot_persona_embd_59.2
dtype: float64
- name: bot_persona_embd_60.2
dtype: float64
- name: bot_persona_embd_61.2
dtype: float64
- name: bot_persona_embd_62.2
dtype: float64
- name: bot_persona_embd_63.2
dtype: float64
- name: bot_persona_embd_64.2
dtype: float64
- name: bot_persona_embd_65.2
dtype: float64
- name: bot_persona_embd_66.2
dtype: float64
- name: bot_persona_embd_67.2
dtype: float64
- name: bot_persona_embd_68.2
dtype: float64
- name: bot_persona_embd_69.2
dtype: float64
- name: bot_persona_embd_70.2
dtype: float64
- name: bot_persona_embd_71.2
dtype: float64
- name: bot_persona_embd_72.2
dtype: float64
- name: bot_persona_embd_73.2
dtype: float64
- name: bot_persona_embd_74.2
dtype: float64
- name: bot_persona_embd_75.2
dtype: float64
- name: bot_persona_embd_76.2
dtype: float64
- name: bot_persona_embd_77.2
dtype: float64
- name: bot_persona_embd_78.2
dtype: float64
- name: bot_persona_embd_79.2
dtype: float64
- name: bot_persona_embd_80.2
dtype: float64
- name: bot_persona_embd_81.2
dtype: float64
- name: bot_persona_embd_82.2
dtype: float64
- name: bot_persona_embd_83.2
dtype: float64
- name: bot_persona_embd_84.2
dtype: float64
- name: bot_persona_embd_85.2
dtype: float64
- name: bot_persona_embd_86.2
dtype: float64
- name: bot_persona_embd_87.2
dtype: float64
- name: bot_persona_embd_88.2
dtype: float64
- name: bot_persona_embd_89.2
dtype: float64
- name: bot_persona_embd_90.2
dtype: float64
- name: bot_persona_embd_91.2
dtype: float64
- name: bot_persona_embd_92.2
dtype: float64
- name: bot_persona_embd_93.2
dtype: float64
- name: bot_persona_embd_94.2
dtype: float64
- name: bot_persona_embd_95.2
dtype: float64
- name: bot_persona_embd_96.2
dtype: float64
- name: bot_persona_embd_97.2
dtype: float64
- name: bot_persona_embd_98.2
dtype: float64
- name: bot_persona_embd_99.2
dtype: float64
- name: bot_persona_embd_100.2
dtype: float64
- name: bot_persona_embd_101.2
dtype: float64
- name: bot_persona_embd_102.2
dtype: float64
- name: bot_persona_embd_103.2
dtype: float64
- name: bot_persona_embd_104.2
dtype: float64
- name: bot_persona_embd_105.2
dtype: float64
- name: bot_persona_embd_106.2
dtype: float64
- name: bot_persona_embd_107.2
dtype: float64
- name: bot_persona_embd_108.2
dtype: float64
- name: bot_persona_embd_109.2
dtype: float64
- name: bot_persona_embd_110.2
dtype: float64
- name: bot_persona_embd_111.2
dtype: float64
- name: bot_persona_embd_112.2
dtype: float64
- name: bot_persona_embd_113.2
dtype: float64
- name: bot_persona_embd_114.2
dtype: float64
- name: bot_persona_embd_115.2
dtype: float64
- name: bot_persona_embd_116.2
dtype: float64
- name: bot_persona_embd_117.2
dtype: float64
- name: bot_persona_embd_118.2
dtype: float64
- name: bot_persona_embd_119.2
dtype: float64
- name: bot_persona_embd_120.2
dtype: float64
- name: bot_persona_embd_121.2
dtype: float64
- name: bot_persona_embd_122.2
dtype: float64
- name: bot_persona_embd_123.2
dtype: float64
- name: bot_persona_embd_124.2
dtype: float64
- name: bot_persona_embd_125.2
dtype: float64
- name: bot_persona_embd_126.2
dtype: float64
- name: bot_persona_embd_127.2
dtype: float64
- name: bot_persona_embd_128.2
dtype: float64
- name: bot_persona_embd_129.2
dtype: float64
- name: bot_persona_embd_130.2
dtype: float64
- name: bot_persona_embd_131.2
dtype: float64
- name: bot_persona_embd_132.2
dtype: float64
- name: bot_persona_embd_133.2
dtype: float64
- name: bot_persona_embd_134.2
dtype: float64
- name: bot_persona_embd_135.2
dtype: float64
- name: bot_persona_embd_136.2
dtype: float64
- name: bot_persona_embd_137.2
dtype: float64
- name: bot_persona_embd_138.2
dtype: float64
- name: bot_persona_embd_139.2
dtype: float64
- name: bot_persona_embd_140.2
dtype: float64
- name: bot_persona_embd_141.2
dtype: float64
- name: bot_persona_embd_142.2
dtype: float64
- name: bot_persona_embd_143.2
dtype: float64
- name: bot_persona_embd_144.2
dtype: float64
- name: bot_persona_embd_145.2
dtype: float64
- name: bot_persona_embd_146.2
dtype: float64
- name: bot_persona_embd_147.2
dtype: float64
- name: bot_persona_embd_148.2
dtype: float64
- name: bot_persona_embd_149.2
dtype: float64
- name: bot_persona_embd_150.2
dtype: float64
- name: bot_persona_embd_151.2
dtype: float64
- name: bot_persona_embd_152.2
dtype: float64
- name: bot_persona_embd_153.2
dtype: float64
- name: bot_persona_embd_154.2
dtype: float64
- name: bot_persona_embd_155.2
dtype: float64
- name: bot_persona_embd_156.2
dtype: float64
- name: bot_persona_embd_157.2
dtype: float64
- name: bot_persona_embd_158.2
dtype: float64
- name: bot_persona_embd_159.2
dtype: float64
- name: bot_persona_embd_160.2
dtype: float64
- name: bot_persona_embd_161.2
dtype: float64
- name: bot_persona_embd_162.2
dtype: float64
- name: bot_persona_embd_163.2
dtype: float64
- name: bot_persona_embd_164.2
dtype: float64
- name: bot_persona_embd_165.2
dtype: float64
- name: bot_persona_embd_166.2
dtype: float64
- name: bot_persona_embd_167.2
dtype: float64
- name: bot_persona_embd_168.2
dtype: float64
- name: bot_persona_embd_169.2
dtype: float64
- name: bot_persona_embd_170.2
dtype: float64
- name: bot_persona_embd_171.2
dtype: float64
- name: bot_persona_embd_172.2
dtype: float64
- name: bot_persona_embd_173.2
dtype: float64
- name: bot_persona_embd_174.2
dtype: float64
- name: bot_persona_embd_175.2
dtype: float64
- name: bot_persona_embd_176.2
dtype: float64
- name: bot_persona_embd_177.2
dtype: float64
- name: bot_persona_embd_178.2
dtype: float64
- name: bot_persona_embd_179.2
dtype: float64
- name: bot_persona_embd_180.2
dtype: float64
- name: bot_persona_embd_181.2
dtype: float64
- name: bot_persona_embd_182.2
dtype: float64
- name: bot_persona_embd_183.2
dtype: float64
- name: bot_persona_embd_184.2
dtype: float64
- name: bot_persona_embd_185.2
dtype: float64
- name: bot_persona_embd_186.2
dtype: float64
- name: bot_persona_embd_187.2
dtype: float64
- name: bot_persona_embd_188.2
dtype: float64
- name: bot_persona_embd_189.2
dtype: float64
- name: bot_persona_embd_190.2
dtype: float64
- name: bot_persona_embd_191.2
dtype: float64
- name: bot_persona_embd_192.2
dtype: float64
- name: bot_persona_embd_193.2
dtype: float64
- name: bot_persona_embd_194.2
dtype: float64
- name: bot_persona_embd_195.2
dtype: float64
- name: bot_persona_embd_196.2
dtype: float64
- name: bot_persona_embd_197.2
dtype: float64
- name: bot_persona_embd_198.2
dtype: float64
- name: bot_persona_embd_199.2
dtype: float64
- name: bot_persona_embd_200.2
dtype: float64
- name: bot_persona_embd_201.2
dtype: float64
- name: bot_persona_embd_202.2
dtype: float64
- name: bot_persona_embd_203.2
dtype: float64
- name: bot_persona_embd_204.2
dtype: float64
- name: bot_persona_embd_205.2
dtype: float64
- name: bot_persona_embd_206.2
dtype: float64
- name: bot_persona_embd_207.2
dtype: float64
- name: bot_persona_embd_208.2
dtype: float64
- name: bot_persona_embd_209.2
dtype: float64
- name: bot_persona_embd_210.2
dtype: float64
- name: bot_persona_embd_211.2
dtype: float64
- name: bot_persona_embd_212.2
dtype: float64
- name: bot_persona_embd_213.2
dtype: float64
- name: bot_persona_embd_214.2
dtype: float64
- name: bot_persona_embd_215.2
dtype: float64
- name: bot_persona_embd_216.2
dtype: float64
- name: bot_persona_embd_217.2
dtype: float64
- name: bot_persona_embd_218.2
dtype: float64
- name: bot_persona_embd_219.2
dtype: float64
- name: bot_persona_embd_220.2
dtype: float64
- name: bot_persona_embd_221.2
dtype: float64
- name: bot_persona_embd_222.2
dtype: float64
- name: bot_persona_embd_223.2
dtype: float64
- name: bot_persona_embd_224.2
dtype: float64
- name: bot_persona_embd_225.2
dtype: float64
- name: bot_persona_embd_226.2
dtype: float64
- name: bot_persona_embd_227.2
dtype: float64
- name: bot_persona_embd_228.2
dtype: float64
- name: bot_persona_embd_229.2
dtype: float64
- name: bot_persona_embd_230.2
dtype: float64
- name: bot_persona_embd_231.2
dtype: float64
- name: bot_persona_embd_232.2
dtype: float64
- name: bot_persona_embd_233.2
dtype: float64
- name: bot_persona_embd_234.2
dtype: float64
- name: bot_persona_embd_235.2
dtype: float64
- name: bot_persona_embd_236.2
dtype: float64
- name: bot_persona_embd_237.2
dtype: float64
- name: bot_persona_embd_238.2
dtype: float64
- name: bot_persona_embd_239.2
dtype: float64
- name: bot_persona_embd_240.2
dtype: float64
- name: bot_persona_embd_241.2
dtype: float64
- name: bot_persona_embd_242.2
dtype: float64
- name: bot_persona_embd_243.2
dtype: float64
- name: bot_persona_embd_244.2
dtype: float64
- name: bot_persona_embd_245.2
dtype: float64
- name: bot_persona_embd_246.2
dtype: float64
- name: bot_persona_embd_247.2
dtype: float64
- name: bot_persona_embd_248.2
dtype: float64
- name: bot_persona_embd_249.2
dtype: float64
- name: bot_persona_embd_250.2
dtype: float64
- name: bot_persona_embd_251.2
dtype: float64
- name: bot_persona_embd_252.2
dtype: float64
- name: bot_persona_embd_253.2
dtype: float64
- name: bot_persona_embd_254.2
dtype: float64
- name: bot_persona_embd_255.2
dtype: float64
- name: bot_persona_embd_256.2
dtype: float64
- name: bot_persona_embd_257.2
dtype: float64
- name: bot_persona_embd_258.2
dtype: float64
- name: bot_persona_embd_259.2
dtype: float64
- name: bot_persona_embd_260.2
dtype: float64
- name: bot_persona_embd_261.2
dtype: float64
- name: bot_persona_embd_262.2
dtype: float64
- name: bot_persona_embd_263.2
dtype: float64
- name: bot_persona_embd_264.2
dtype: float64
- name: bot_persona_embd_265.2
dtype: float64
- name: bot_persona_embd_266.2
dtype: float64
- name: bot_persona_embd_267.2
dtype: float64
- name: bot_persona_embd_268.2
dtype: float64
- name: bot_persona_embd_269.2
dtype: float64
- name: bot_persona_embd_270.2
dtype: float64
- name: bot_persona_embd_271.2
dtype: float64
- name: bot_persona_embd_272.2
dtype: float64
- name: bot_persona_embd_273.2
dtype: float64
- name: bot_persona_embd_274.2
dtype: float64
- name: bot_persona_embd_275.2
dtype: float64
- name: bot_persona_embd_276.2
dtype: float64
- name: bot_persona_embd_277.2
dtype: float64
- name: bot_persona_embd_278.2
dtype: float64
- name: bot_persona_embd_279.2
dtype: float64
- name: bot_persona_embd_280.2
dtype: float64
- name: bot_persona_embd_281.2
dtype: float64
- name: bot_persona_embd_282.2
dtype: float64
- name: bot_persona_embd_283.2
dtype: float64
- name: bot_persona_embd_284.2
dtype: float64
- name: bot_persona_embd_285.2
dtype: float64
- name: bot_persona_embd_286.2
dtype: float64
- name: bot_persona_embd_287.2
dtype: float64
- name: bot_persona_embd_288.2
dtype: float64
- name: bot_persona_embd_289.2
dtype: float64
- name: bot_persona_embd_290.2
dtype: float64
- name: bot_persona_embd_291.2
dtype: float64
- name: bot_persona_embd_292.2
dtype: float64
- name: bot_persona_embd_293.2
dtype: float64
- name: bot_persona_embd_294.2
dtype: float64
- name: bot_persona_embd_295.2
dtype: float64
- name: bot_persona_embd_296.2
dtype: float64
- name: bot_persona_embd_297.2
dtype: float64
- name: bot_persona_embd_298.2
dtype: float64
- name: bot_persona_embd_299.2
dtype: float64
- name: bot_persona_embd_300.2
dtype: float64
- name: bot_persona_embd_301.2
dtype: float64
- name: bot_persona_embd_302.2
dtype: float64
- name: bot_persona_embd_303.2
dtype: float64
- name: bot_persona_embd_304.2
dtype: float64
- name: bot_persona_embd_305.2
dtype: float64
- name: bot_persona_embd_306.2
dtype: float64
- name: bot_persona_embd_307.2
dtype: float64
- name: bot_persona_embd_308.2
dtype: float64
- name: bot_persona_embd_309.2
dtype: float64
- name: bot_persona_embd_310.2
dtype: float64
- name: bot_persona_embd_311.2
dtype: float64
- name: bot_persona_embd_312.2
dtype: float64
- name: bot_persona_embd_313.2
dtype: float64
- name: bot_persona_embd_314.2
dtype: float64
- name: bot_persona_embd_315.2
dtype: float64
- name: bot_persona_embd_316.2
dtype: float64
- name: bot_persona_embd_317.2
dtype: float64
- name: bot_persona_embd_318.2
dtype: float64
- name: bot_persona_embd_319.2
dtype: float64
- name: bot_persona_embd_320.2
dtype: float64
- name: bot_persona_embd_321.2
dtype: float64
- name: bot_persona_embd_322.2
dtype: float64
- name: bot_persona_embd_323.2
dtype: float64
- name: bot_persona_embd_324.2
dtype: float64
- name: bot_persona_embd_325.2
dtype: float64
- name: bot_persona_embd_326.2
dtype: float64
- name: bot_persona_embd_327.2
dtype: float64
- name: bot_persona_embd_328.2
dtype: float64
- name: bot_persona_embd_329.2
dtype: float64
- name: bot_persona_embd_330.2
dtype: float64
- name: bot_persona_embd_331.2
dtype: float64
- name: bot_persona_embd_332.2
dtype: float64
- name: bot_persona_embd_333.2
dtype: float64
- name: bot_persona_embd_334.2
dtype: float64
- name: bot_persona_embd_335.2
dtype: float64
- name: bot_persona_embd_336.2
dtype: float64
- name: bot_persona_embd_337.2
dtype: float64
- name: bot_persona_embd_338.2
dtype: float64
- name: bot_persona_embd_339.2
dtype: float64
- name: bot_persona_embd_340.2
dtype: float64
- name: bot_persona_embd_341.2
dtype: float64
- name: bot_persona_embd_342.2
dtype: float64
- name: bot_persona_embd_343.2
dtype: float64
- name: bot_persona_embd_344.2
dtype: float64
- name: bot_persona_embd_345.2
dtype: float64
- name: bot_persona_embd_346.2
dtype: float64
- name: bot_persona_embd_347.2
dtype: float64
- name: bot_persona_embd_348.2
dtype: float64
- name: bot_persona_embd_349.2
dtype: float64
- name: bot_persona_embd_350.2
dtype: float64
- name: bot_persona_embd_351.2
dtype: float64
- name: bot_persona_embd_352.2
dtype: float64
- name: bot_persona_embd_353.2
dtype: float64
- name: bot_persona_embd_354.2
dtype: float64
- name: bot_persona_embd_355.2
dtype: float64
- name: bot_persona_embd_356.2
dtype: float64
- name: bot_persona_embd_357.2
dtype: float64
- name: bot_persona_embd_358.2
dtype: float64
- name: bot_persona_embd_359.2
dtype: float64
- name: bot_persona_embd_360.2
dtype: float64
- name: bot_persona_embd_361.2
dtype: float64
- name: bot_persona_embd_362.2
dtype: float64
- name: bot_persona_embd_363.2
dtype: float64
- name: bot_persona_embd_364.2
dtype: float64
- name: bot_persona_embd_365.2
dtype: float64
- name: bot_persona_embd_366.2
dtype: float64
- name: bot_persona_embd_367.2
dtype: float64
- name: bot_persona_embd_368.2
dtype: float64
- name: bot_persona_embd_369.2
dtype: float64
- name: bot_persona_embd_370.2
dtype: float64
- name: bot_persona_embd_371.2
dtype: float64
- name: bot_persona_embd_372.2
dtype: float64
- name: bot_persona_embd_373.2
dtype: float64
- name: bot_persona_embd_374.2
dtype: float64
- name: bot_persona_embd_375.2
dtype: float64
- name: bot_persona_embd_376.2
dtype: float64
- name: bot_persona_embd_377.2
dtype: float64
- name: bot_persona_embd_378.2
dtype: float64
- name: bot_persona_embd_379.2
dtype: float64
- name: bot_persona_embd_380.2
dtype: float64
- name: bot_persona_embd_381.2
dtype: float64
- name: bot_persona_embd_382.2
dtype: float64
- name: bot_persona_embd_383.2
dtype: float64
- name: content_type.2
dtype: int64
- name: user_gender.2
dtype: int64
- name: user_age.2
dtype: float64
- name: bot_gender.2
dtype: int64
splits:
- name: train
num_bytes: 567417112
num_examples: 17112
download_size: 313937654
dataset_size: 567417112
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
open-llm-leaderboard/CultriX__Qwen2.5-14B-Wernicke-SFT-details | open-llm-leaderboard | "2024-11-20T02:21:44Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T02:17:54Z" | ---
pretty_name: Evaluation run of CultriX/Qwen2.5-14B-Wernicke-SFT
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [CultriX/Qwen2.5-14B-Wernicke-SFT](https://huggingface.co/CultriX/Qwen2.5-14B-Wernicke-SFT)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/CultriX__Qwen2.5-14B-Wernicke-SFT-details\"\
,\n\tname=\"CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-11-20T02-17-53.848498](https://huggingface.co/datasets/open-llm-leaderboard/CultriX__Qwen2.5-14B-Wernicke-SFT-details/blob/main/CultriX__Qwen2.5-14B-Wernicke-SFT/results_2024-11-20T02-17-53.848498.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"exact_match,none\": 0.3580060422960725,\n \"exact_match_stderr,none\"\
: 0.011785613145328193,\n \"acc_norm,none\": 0.5743935659618628,\n \
\ \"acc_norm_stderr,none\": 0.00513288554021205,\n \"acc,none\"\
: 0.5069813829787234,\n \"acc_stderr,none\": 0.004558026040666208,\n\
\ \"inst_level_strict_acc,none\": 0.5623501199040767,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.45286506469500926,\n \"prompt_level_loose_acc_stderr,none\": 0.02142075394952955,\n\
\ \"prompt_level_strict_acc,none\": 0.42513863216266173,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.02127403980535566,\n \"\
inst_level_loose_acc,none\": 0.5899280575539568,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.6443325811491061,\n \"acc_norm_stderr,none\"\
: 0.005826485983894892,\n \"alias\": \" - leaderboard_bbh\"\n \
\ },\n \"leaderboard_bbh_boolean_expressions\": {\n \"alias\"\
: \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\": 0.916,\n\
\ \"acc_norm_stderr,none\": 0.017578738526776348\n },\n \
\ \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.679144385026738,\n \"acc_norm_stderr,none\"\
: 0.03422783320926161\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_date_understanding\",\n \
\ \"acc_norm,none\": 0.684,\n \"acc_norm_stderr,none\": 0.02946265759857865\n\
\ },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\": 0.704,\n\
\ \"acc_norm_stderr,none\": 0.028928939388379697\n },\n \
\ \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.664,\n \"acc_norm_stderr,none\":\
\ 0.029933259094191533\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\",\n \
\ \"acc_norm,none\": 0.556,\n \"acc_norm_stderr,none\": 0.03148684942554571\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \"\
\ - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\": 0.744,\n \
\ \"acc_norm_stderr,none\": 0.027657108718204846\n },\n \"\
leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\": \" \
\ - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.608,\n \"acc_norm_stderr,none\": 0.030938207620401222\n },\n\
\ \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\",\n \"\
acc_norm,none\": 0.588,\n \"acc_norm_stderr,none\": 0.031191596026022818\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n\
\ \"acc_norm,none\": 0.936,\n \"acc_norm_stderr,none\": 0.015510587134374122\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"acc_norm,none\"\
: 0.776,\n \"acc_norm_stderr,none\": 0.026421361687347884\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.728,\n \"acc_norm_stderr,none\":\
\ 0.028200088296309975\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"alias\": \" - leaderboard_bbh_object_counting\",\n \
\ \"acc_norm,none\": 0.448,\n \"acc_norm_stderr,none\": 0.03151438761115349\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
alias\": \" - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\"\
: 0.6438356164383562,\n \"acc_norm_stderr,none\": 0.03976754138601307\n\
\ },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\",\n\
\ \"acc_norm,none\": 0.796,\n \"acc_norm_stderr,none\": 0.025537121574548162\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\": 0.792,\n \
\ \"acc_norm_stderr,none\": 0.025721398901416368\n },\n \"\
leaderboard_bbh_salient_translation_error_detection\": {\n \"alias\"\
: \" - leaderboard_bbh_salient_translation_error_detection\",\n \"acc_norm,none\"\
: 0.612,\n \"acc_norm_stderr,none\": 0.030881038748993974\n },\n\
\ \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.7584269662921348,\n \"acc_norm_stderr,none\"\
: 0.032173216138332565\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \
\ \"acc_norm,none\": 0.816,\n \"acc_norm_stderr,none\": 0.02455581299422255\n\
\ },\n \"leaderboard_bbh_temporal_sequences\": {\n \"alias\"\
: \" - leaderboard_bbh_temporal_sequences\",\n \"acc_norm,none\": 0.736,\n\
\ \"acc_norm_stderr,none\": 0.027934518957690866\n },\n \
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \"\
alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\",\n \
\ \"acc_norm,none\": 0.284,\n \"acc_norm_stderr,none\": 0.02857695873043744\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.192,\n \"acc_norm_stderr,none\":\
\ 0.024960691989171963\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.332,\n \"acc_norm_stderr,none\":\
\ 0.029844039047465857\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.512,\n \"acc_norm_stderr,none\": 0.03167708558254714\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.3540268456375839,\n\
\ \"acc_norm_stderr,none\": 0.013834739341589849,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.30808080808080807,\n \"acc_norm_stderr,none\": 0.03289477330098615\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.336996336996337,\n\
\ \"acc_norm_stderr,none\": 0.02024754173989942\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.3950892857142857,\n \"acc_norm_stderr,none\"\
: 0.02312275103172857\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.42513863216266173,\n \"prompt_level_strict_acc_stderr,none\": 0.021274039805355655,\n\
\ \"inst_level_strict_acc,none\": 0.5623501199040767,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.45286506469500926,\n \"prompt_level_loose_acc_stderr,none\": 0.02142075394952955,\n\
\ \"inst_level_loose_acc,none\": 0.5899280575539568,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.3580060422960725,\n \"exact_match_stderr,none\"\
: 0.011785613145328193,\n \"alias\": \" - leaderboard_math_hard\"\n \
\ },\n \"leaderboard_math_algebra_hard\": {\n \"alias\"\
: \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\": 0.6416938110749185,\n\
\ \"exact_match_stderr,none\": 0.02741134634015542\n },\n \
\ \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\": \" \
\ - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.3170731707317073,\n \"exact_match_stderr,none\": 0.0421295596485305\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\"\
: \" - leaderboard_math_geometry_hard\",\n \"exact_match,none\": 0.2196969696969697,\n\
\ \"exact_match_stderr,none\": 0.036174957725402315\n },\n \
\ \"leaderboard_math_intermediate_algebra_hard\": {\n \"alias\":\
\ \" - leaderboard_math_intermediate_algebra_hard\",\n \"exact_match,none\"\
: 0.11428571428571428,\n \"exact_match_stderr,none\": 0.019047619047619046\n\
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"alias\"\
: \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\": 0.36363636363636365,\n\
\ \"exact_match_stderr,none\": 0.03889023926786351\n },\n \
\ \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.5647668393782384,\n \"exact_match_stderr,none\"\
: 0.03578038165008584\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.08888888888888889,\n \"exact_match_stderr,none\"\
: 0.02458425268352428\n },\n \"leaderboard_mmlu_pro\": {\n \
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.5069813829787234,\n\
\ \"acc_stderr,none\": 0.004558026040666208\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.3888888888888889,\n \"acc_norm_stderr,none\"\
: 0.017095291557742226,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"alias\":\
\ \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\": 0.568,\n\
\ \"acc_norm_stderr,none\": 0.03139181076542941\n },\n \
\ \"leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.25,\n \"acc_norm_stderr,none\": 0.02711630722733202\n\
\ },\n \"leaderboard_musr_team_allocation\": {\n \"alias\"\
: \" - leaderboard_musr_team_allocation\",\n \"acc_norm,none\": 0.352,\n\
\ \"acc_norm_stderr,none\": 0.030266288057359866\n }\n },\n\
\ \"leaderboard\": {\n \"exact_match,none\": 0.3580060422960725,\n \
\ \"exact_match_stderr,none\": 0.011785613145328193,\n \"acc_norm,none\"\
: 0.5743935659618628,\n \"acc_norm_stderr,none\": 0.00513288554021205,\n\
\ \"acc,none\": 0.5069813829787234,\n \"acc_stderr,none\": 0.004558026040666208,\n\
\ \"inst_level_strict_acc,none\": 0.5623501199040767,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.45286506469500926,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.02142075394952955,\n \"prompt_level_strict_acc,none\"\
: 0.42513863216266173,\n \"prompt_level_strict_acc_stderr,none\": 0.02127403980535566,\n\
\ \"inst_level_loose_acc,none\": 0.5899280575539568,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.6443325811491061,\n \"acc_norm_stderr,none\"\
: 0.005826485983894892,\n \"alias\": \" - leaderboard_bbh\"\n },\n \
\ \"leaderboard_bbh_boolean_expressions\": {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\"\
,\n \"acc_norm,none\": 0.916,\n \"acc_norm_stderr,none\": 0.017578738526776348\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.679144385026738,\n \"acc_norm_stderr,none\"\
: 0.03422783320926161\n },\n \"leaderboard_bbh_date_understanding\": {\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.684,\n \"acc_norm_stderr,none\": 0.02946265759857865\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.704,\n \"acc_norm_stderr,none\": 0.028928939388379697\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.664,\n \"acc_norm_stderr,none\": 0.029933259094191533\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.556,\n \"acc_norm_stderr,none\": 0.03148684942554571\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.744,\n \"acc_norm_stderr,none\": 0.027657108718204846\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.608,\n \"acc_norm_stderr,none\": 0.030938207620401222\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.588,\n \"acc_norm_stderr,none\": 0.031191596026022818\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.936,\n \"acc_norm_stderr,none\": 0.015510587134374122\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.776,\n \"acc_norm_stderr,none\": 0.026421361687347884\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.728,\n \"acc_norm_stderr,none\": 0.028200088296309975\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.448,\n \"acc_norm_stderr,none\": 0.03151438761115349\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.6438356164383562,\n\
\ \"acc_norm_stderr,none\": 0.03976754138601307\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.796,\n \"acc_norm_stderr,none\": 0.025537121574548162\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.792,\n \"acc_norm_stderr,none\": 0.025721398901416368\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.612,\n \"acc_norm_stderr,none\": 0.030881038748993974\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.7584269662921348,\n \"acc_norm_stderr,none\"\
: 0.032173216138332565\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.816,\n \"acc_norm_stderr,none\": 0.02455581299422255\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.736,\n \"acc_norm_stderr,none\": 0.027934518957690866\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.284,\n \"acc_norm_stderr,none\": 0.02857695873043744\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.192,\n \"acc_norm_stderr,none\": 0.024960691989171963\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.332,\n \"acc_norm_stderr,none\": 0.029844039047465857\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.512,\n \"acc_norm_stderr,none\": 0.03167708558254714\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.3540268456375839,\n\
\ \"acc_norm_stderr,none\": 0.013834739341589849,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.30808080808080807,\n\
\ \"acc_norm_stderr,none\": 0.03289477330098615\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.336996336996337,\n \"acc_norm_stderr,none\": 0.02024754173989942\n \
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.3950892857142857,\n \"acc_norm_stderr,none\"\
: 0.02312275103172857\n },\n \"leaderboard_ifeval\": {\n \"alias\"\
: \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\": 0.42513863216266173,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.021274039805355655,\n \
\ \"inst_level_strict_acc,none\": 0.5623501199040767,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.45286506469500926,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.02142075394952955,\n \"inst_level_loose_acc,none\"\
: 0.5899280575539568,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\n \
\ },\n \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.3580060422960725,\n\
\ \"exact_match_stderr,none\": 0.011785613145328193,\n \"alias\":\
\ \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\": {\n\
\ \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.6416938110749185,\n \"exact_match_stderr,none\": 0.02741134634015542\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.3170731707317073,\n \"exact_match_stderr,none\": 0.0421295596485305\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\": \" - leaderboard_math_geometry_hard\"\
,\n \"exact_match,none\": 0.2196969696969697,\n \"exact_match_stderr,none\"\
: 0.036174957725402315\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.11428571428571428,\n \"exact_match_stderr,none\"\
: 0.019047619047619046\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.36363636363636365,\n \"exact_match_stderr,none\": 0.03889023926786351\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.5647668393782384,\n \"exact_match_stderr,none\"\
: 0.03578038165008584\n },\n \"leaderboard_math_precalculus_hard\": {\n \
\ \"alias\": \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\"\
: 0.08888888888888889,\n \"exact_match_stderr,none\": 0.02458425268352428\n\
\ },\n \"leaderboard_mmlu_pro\": {\n \"alias\": \" - leaderboard_mmlu_pro\"\
,\n \"acc,none\": 0.5069813829787234,\n \"acc_stderr,none\": 0.004558026040666208\n\
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.3888888888888889,\n\
\ \"acc_norm_stderr,none\": 0.017095291557742226,\n \"alias\": \"\
\ - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\": {\n \
\ \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\"\
: 0.568,\n \"acc_norm_stderr,none\": 0.03139181076542941\n },\n \"\
leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.25,\n \"acc_norm_stderr,none\": 0.02711630722733202\n\
\ },\n \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.352,\n \"acc_norm_stderr,none\": 0.030266288057359866\n\
\ }\n}\n```"
repo_url: https://huggingface.co/CultriX/Qwen2.5-14B-Wernicke-SFT
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_navigate
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_snarks
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_gpqa_extended
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_gpqa_main
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_ifeval
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_ifeval_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_mmlu_pro
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_musr_object_placements
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T02-17-53.848498.jsonl'
- config_name: CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_20T02_17_53.848498
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T02-17-53.848498.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T02-17-53.848498.jsonl'
---
# Dataset Card for Evaluation run of CultriX/Qwen2.5-14B-Wernicke-SFT
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [CultriX/Qwen2.5-14B-Wernicke-SFT](https://huggingface.co/CultriX/Qwen2.5-14B-Wernicke-SFT)
The dataset is composed of 38 configuration(s), each one corresponding to one of the evaluated task.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results.
An additional configuration "results" store all the aggregated results of the run.
To load the details from a run, you can, for instance, do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/CultriX__Qwen2.5-14B-Wernicke-SFT-details",
name="CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
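Each per-task configuration also exposes the timestamped split listed in the YAML above, and the object returned by `load_dataset` is a regular `datasets.Dataset`. A minimal follow-up sketch, assuming the `datasets` library is installed (the inspection calls below are illustrative and not part of the card itself):
```python
from datasets import load_dataset

# Same configuration as above, but pinned to the timestamped split from the YAML
# instead of the moving "latest" alias.
data = load_dataset(
    "open-llm-leaderboard/CultriX__Qwen2.5-14B-Wernicke-SFT-details",
    name="CultriX__Qwen2.5-14B-Wernicke-SFT__leaderboard_bbh_boolean_expressions",
    split="2024_11_20T02_17_53.848498",
)

# Basic inspection of the per-sample details.
print(len(data), "samples")
print(data.column_names)

# Optional: browse the samples as a pandas DataFrame.
df = data.to_pandas()
print(df.head())
```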
## Latest results
These are the [latest results from run 2024-11-20T02-17-53.848498](https://huggingface.co/datasets/open-llm-leaderboard/CultriX__Qwen2.5-14B-Wernicke-SFT-details/blob/main/CultriX__Qwen2.5-14B-Wernicke-SFT/results_2024-11-20T02-17-53.848498.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in its results file and in the "latest" split of each eval):
```python
{
"all": {
"leaderboard": {
"exact_match,none": 0.3580060422960725,
"exact_match_stderr,none": 0.011785613145328193,
"acc_norm,none": 0.5743935659618628,
"acc_norm_stderr,none": 0.00513288554021205,
"acc,none": 0.5069813829787234,
"acc_stderr,none": 0.004558026040666208,
"inst_level_strict_acc,none": 0.5623501199040767,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.45286506469500926,
"prompt_level_loose_acc_stderr,none": 0.02142075394952955,
"prompt_level_strict_acc,none": 0.42513863216266173,
"prompt_level_strict_acc_stderr,none": 0.02127403980535566,
"inst_level_loose_acc,none": 0.5899280575539568,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.6443325811491061,
"acc_norm_stderr,none": 0.005826485983894892,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.916,
"acc_norm_stderr,none": 0.017578738526776348
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.679144385026738,
"acc_norm_stderr,none": 0.03422783320926161
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.684,
"acc_norm_stderr,none": 0.02946265759857865
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.704,
"acc_norm_stderr,none": 0.028928939388379697
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.664,
"acc_norm_stderr,none": 0.029933259094191533
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.556,
"acc_norm_stderr,none": 0.03148684942554571
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.744,
"acc_norm_stderr,none": 0.027657108718204846
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.608,
"acc_norm_stderr,none": 0.030938207620401222
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.588,
"acc_norm_stderr,none": 0.031191596026022818
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.936,
"acc_norm_stderr,none": 0.015510587134374122
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.776,
"acc_norm_stderr,none": 0.026421361687347884
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.728,
"acc_norm_stderr,none": 0.028200088296309975
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.448,
"acc_norm_stderr,none": 0.03151438761115349
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.6438356164383562,
"acc_norm_stderr,none": 0.03976754138601307
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.796,
"acc_norm_stderr,none": 0.025537121574548162
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.792,
"acc_norm_stderr,none": 0.025721398901416368
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.612,
"acc_norm_stderr,none": 0.030881038748993974
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.7584269662921348,
"acc_norm_stderr,none": 0.032173216138332565
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.816,
"acc_norm_stderr,none": 0.02455581299422255
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.736,
"acc_norm_stderr,none": 0.027934518957690866
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.284,
"acc_norm_stderr,none": 0.02857695873043744
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.192,
"acc_norm_stderr,none": 0.024960691989171963
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.332,
"acc_norm_stderr,none": 0.029844039047465857
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.512,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.3540268456375839,
"acc_norm_stderr,none": 0.013834739341589849,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.30808080808080807,
"acc_norm_stderr,none": 0.03289477330098615
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.336996336996337,
"acc_norm_stderr,none": 0.02024754173989942
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.3950892857142857,
"acc_norm_stderr,none": 0.02312275103172857
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.42513863216266173,
"prompt_level_strict_acc_stderr,none": 0.021274039805355655,
"inst_level_strict_acc,none": 0.5623501199040767,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.45286506469500926,
"prompt_level_loose_acc_stderr,none": 0.02142075394952955,
"inst_level_loose_acc,none": 0.5899280575539568,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.3580060422960725,
"exact_match_stderr,none": 0.011785613145328193,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.6416938110749185,
"exact_match_stderr,none": 0.02741134634015542
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.3170731707317073,
"exact_match_stderr,none": 0.0421295596485305
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.2196969696969697,
"exact_match_stderr,none": 0.036174957725402315
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.11428571428571428,
"exact_match_stderr,none": 0.019047619047619046
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.36363636363636365,
"exact_match_stderr,none": 0.03889023926786351
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.5647668393782384,
"exact_match_stderr,none": 0.03578038165008584
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.08888888888888889,
"exact_match_stderr,none": 0.02458425268352428
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.5069813829787234,
"acc_stderr,none": 0.004558026040666208
},
"leaderboard_musr": {
"acc_norm,none": 0.3888888888888889,
"acc_norm_stderr,none": 0.017095291557742226,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.568,
"acc_norm_stderr,none": 0.03139181076542941
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.25,
"acc_norm_stderr,none": 0.02711630722733202
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.352,
"acc_norm_stderr,none": 0.030266288057359866
}
},
"leaderboard": {
"exact_match,none": 0.3580060422960725,
"exact_match_stderr,none": 0.011785613145328193,
"acc_norm,none": 0.5743935659618628,
"acc_norm_stderr,none": 0.00513288554021205,
"acc,none": 0.5069813829787234,
"acc_stderr,none": 0.004558026040666208,
"inst_level_strict_acc,none": 0.5623501199040767,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.45286506469500926,
"prompt_level_loose_acc_stderr,none": 0.02142075394952955,
"prompt_level_strict_acc,none": 0.42513863216266173,
"prompt_level_strict_acc_stderr,none": 0.02127403980535566,
"inst_level_loose_acc,none": 0.5899280575539568,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.6443325811491061,
"acc_norm_stderr,none": 0.005826485983894892,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.916,
"acc_norm_stderr,none": 0.017578738526776348
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.679144385026738,
"acc_norm_stderr,none": 0.03422783320926161
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.684,
"acc_norm_stderr,none": 0.02946265759857865
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.704,
"acc_norm_stderr,none": 0.028928939388379697
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.664,
"acc_norm_stderr,none": 0.029933259094191533
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.556,
"acc_norm_stderr,none": 0.03148684942554571
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.744,
"acc_norm_stderr,none": 0.027657108718204846
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.608,
"acc_norm_stderr,none": 0.030938207620401222
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.588,
"acc_norm_stderr,none": 0.031191596026022818
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.936,
"acc_norm_stderr,none": 0.015510587134374122
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.776,
"acc_norm_stderr,none": 0.026421361687347884
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.728,
"acc_norm_stderr,none": 0.028200088296309975
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.448,
"acc_norm_stderr,none": 0.03151438761115349
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.6438356164383562,
"acc_norm_stderr,none": 0.03976754138601307
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.796,
"acc_norm_stderr,none": 0.025537121574548162
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.792,
"acc_norm_stderr,none": 0.025721398901416368
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.612,
"acc_norm_stderr,none": 0.030881038748993974
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.7584269662921348,
"acc_norm_stderr,none": 0.032173216138332565
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.816,
"acc_norm_stderr,none": 0.02455581299422255
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.736,
"acc_norm_stderr,none": 0.027934518957690866
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.284,
"acc_norm_stderr,none": 0.02857695873043744
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.192,
"acc_norm_stderr,none": 0.024960691989171963
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.332,
"acc_norm_stderr,none": 0.029844039047465857
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.512,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_gpqa": {
"acc_norm,none": 0.3540268456375839,
"acc_norm_stderr,none": 0.013834739341589849,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.30808080808080807,
"acc_norm_stderr,none": 0.03289477330098615
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.336996336996337,
"acc_norm_stderr,none": 0.02024754173989942
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.3950892857142857,
"acc_norm_stderr,none": 0.02312275103172857
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.42513863216266173,
"prompt_level_strict_acc_stderr,none": 0.021274039805355655,
"inst_level_strict_acc,none": 0.5623501199040767,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.45286506469500926,
"prompt_level_loose_acc_stderr,none": 0.02142075394952955,
"inst_level_loose_acc,none": 0.5899280575539568,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.3580060422960725,
"exact_match_stderr,none": 0.011785613145328193,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.6416938110749185,
"exact_match_stderr,none": 0.02741134634015542
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.3170731707317073,
"exact_match_stderr,none": 0.0421295596485305
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.2196969696969697,
"exact_match_stderr,none": 0.036174957725402315
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.11428571428571428,
"exact_match_stderr,none": 0.019047619047619046
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.36363636363636365,
"exact_match_stderr,none": 0.03889023926786351
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.5647668393782384,
"exact_match_stderr,none": 0.03578038165008584
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.08888888888888889,
"exact_match_stderr,none": 0.02458425268352428
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.5069813829787234,
"acc_stderr,none": 0.004558026040666208
},
"leaderboard_musr": {
"acc_norm,none": 0.3888888888888889,
"acc_norm_stderr,none": 0.017095291557742226,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.568,
"acc_norm_stderr,none": 0.03139181076542941
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.25,
"acc_norm_stderr,none": 0.02711630722733202
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.352,
"acc_norm_stderr,none": 0.030266288057359866
}
}
```
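If you prefer to work with this aggregated results file programmatically instead of copying it from the card, the sketch below downloads it with `huggingface_hub`; the only inputs are the repository id and file path taken from the link above, and the top-level keys are printed rather than assumed:

```python
import json

from huggingface_hub import hf_hub_download

# Fetch the aggregated results JSON referenced in the "Latest results" link above.
path = hf_hub_download(
    repo_id="open-llm-leaderboard/CultriX__Qwen2.5-14B-Wernicke-SFT-details",
    filename="CultriX__Qwen2.5-14B-Wernicke-SFT/results_2024-11-20T02-17-53.848498.json",
    repo_type="dataset",
)
with open(path) as f:
    results = json.load(f)

# Inspect the structure before relying on any particular key.
print(list(results.keys()))
```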
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
hoonikoo/emotion_1120 | hoonikoo | "2024-11-20T03:12:08Z" | 9 | 0 | [
"license:apache-2.0",
"size_categories:100K<n<1M",
"format:csv",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T03:03:46Z" | ---
license: apache-2.0
---
|
CodeDPO/CodeDPO_data | CodeDPO | "2024-11-20T04:12:33Z" | 9 | 0 | [
"size_categories:100K<n<1M",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T03:33:52Z" | ---
dataset_info:
features:
- name: question
dtype: string
- name: tests
sequence: string
- name: model_outputs
list:
- name: model
dtype: string
- name: output
dtype: string
- name: pass_rate
dtype: float64
splits:
- name: train
num_bytes: 10115704466
num_examples: 110966
download_size: 2283035827
dataset_size: 10115704466
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
gswamy/pythia-1.4B-tldr-iter-2 | gswamy | "2024-11-20T05:30:01Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T05:29:12Z" | ---
dataset_info:
features:
- name: info
struct:
- name: id
dtype: string
- name: post
dtype: string
- name: title
dtype: string
- name: subreddit
dtype: string
- name: site
dtype: string
- name: article
dtype: string
- name: summaries
list:
- name: text
dtype: string
- name: policy
dtype: string
- name: note
dtype: string
- name: choice
dtype: int32
- name: worker
dtype: string
- name: batch
dtype: string
- name: split
dtype: string
- name: extra
struct:
- name: confidence
dtype: int32
- name: query_token
sequence: int64
- name: query
dtype: string
- name: response0
dtype: string
- name: response0_token
sequence: int64
- name: response0_token_len
dtype: int64
- name: response0_policy
dtype: string
- name: query_response0
dtype: string
- name: query_response0_token
sequence: int64
- name: query_response0_token_len
dtype: int64
- name: query_response0_token_response_label
sequence: int64
- name: response1
dtype: string
- name: response1_token
sequence: int64
- name: response1_token_len
dtype: int64
- name: response1_policy
dtype: string
- name: query_response1
dtype: string
- name: query_response1_token
sequence: int64
- name: query_response1_token_len
dtype: int64
- name: query_response1_token_response_label
sequence: int64
- name: query_token_len
dtype: int64
- name: policies
dtype: string
- name: iter_2_query_response_0
sequence: int64
- name: iter_2_mask_0
sequence: int64
- name: iter_2_decoded_0
dtype: string
- name: iter_2_reward_0
dtype: float64
- name: iter_2_query_response_1
sequence: int64
- name: iter_2_mask_1
sequence: int64
- name: iter_2_decoded_1
dtype: string
- name: iter_2_reward_1
dtype: float64
splits:
- name: train
num_bytes: 5310568778
num_examples: 92858
download_size: 277689340
dataset_size: 5310568778
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
kayalvizhi42/donut_recepits | kayalvizhi42 | "2024-11-20T05:29:52Z" | 9 | 0 | [
"license:mit",
"region:us"
] | null | "2024-11-20T05:29:52Z" | ---
license: mit
---
|
anonymous20251119/anonymous | anonymous20251119 | "2024-11-20T16:12:58Z" | 9 | 0 | [
"license:cc-by-4.0",
"size_categories:1K<n<10K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T06:05:44Z" | ---
license: cc-by-4.0
configs:
- config_name: default
data_files:
- split: all
path: "all_anno.json"
--- |
hoonikoo/Data_DPO_1120_2 | hoonikoo | "2024-11-20T06:27:30Z" | 9 | 0 | [
"license:apache-2.0",
"size_categories:100K<n<1M",
"format:csv",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T06:27:16Z" | ---
license: apache-2.0
---
|
BigCatc/Skywork-Reward-Preference-80K-v0.2-ordinal | BigCatc | "2024-11-20T06:58:49Z" | 9 | 0 | [
"task_categories:text-classification",
"size_categories:10K<n<100K",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | [
"text-classification"
] | "2024-11-20T06:52:59Z" | ---
dataset_info:
features:
- name: chosen
list:
- name: content
dtype: string
- name: role
dtype: string
- name: rejected
list:
- name: content
dtype: string
- name: role
dtype: string
- name: source
dtype: string
- name: chosen_score
dtype: float64
- name: rejected_score
dtype: float64
splits:
- name: train
num_bytes: 416854646
num_examples: 77016
download_size: 209653621
dataset_size: 416854646
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
task_categories:
- text-classification
---
This dataset is built on top of `Skywork/Skywork-Reward-Preference-80K-v0.2`, and the scores are annotated by `Skywork/Skywork-Reward-Gemma-2-27B-v0.2`. It is used in the paper "Reward Modeling with Ordinal Feedback: Wisdom of the Crowd". See the [GitHub repo](https://github.com/LoveCatc/WisdomOfCrowd) for more information. |
Lakshay1Dagar/marketing_dataset_v4_alpaca | Lakshay1Dagar | "2024-11-20T07:06:00Z" | 9 | 0 | [
"license:apache-2.0",
"size_categories:n<1K",
"format:json",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T07:05:32Z" | ---
license: apache-2.0
---
|
bitong/SAMSum-Indo | bitong | "2024-11-20T07:12:44Z" | 9 | 0 | [
"task_categories:summarization",
"language:id",
"license:apache-2.0",
"region:us"
] | [
"summarization"
] | "2024-11-20T07:09:58Z" | ---
license: apache-2.0
task_categories:
- summarization
language:
- id
--- |
bitong/DialogSUM-Indo | bitong | "2024-11-20T07:15:35Z" | 9 | 0 | [
"task_categories:summarization",
"language:id",
"license:apache-2.0",
"size_categories:1M<n<10M",
"region:us"
] | [
"summarization"
] | "2024-11-20T07:13:47Z" | ---
license: apache-2.0
task_categories:
- summarization
language:
- id
size_categories:
- 1M<n<10M
--- |
Aiiluo/Chinese-Law-SFT-Dataset | Aiiluo | "2024-11-20T08:27:32Z" | 9 | 0 | [
"license:apache-2.0",
"region:us"
] | null | "2024-11-20T07:23:38Z" | ---
license: apache-2.0
---
本数据集提供了一个基于最新中国法律的结构化数据集,适用于各种自然语言处理任务,特别是针对中国法律领域的指令微调、问答和文本生成等。 目前数据涵盖截至2024年11月1日生效的中国刑法条文,并已剔除失效条款,确保内容的时效性。
数据集采用类似SFT (Supervised Fine-tuning) 数据集的格式,包含以下三个关键字段:
* **instruction (指令):** 描述需要执行的任务或提出的法律问题。默认为”请回答以下法律问题“。
* **input (输入):** 提供与指令相关的上下文信息或具体案例描述。
* **output (输出):** 提供针对指令和输入的法律解答或相应刑法条文。
This dataset provides a structured dataset based on the latest Chinese Law, suitable for various Natural Language Processing (NLP) tasks, especially instruction fine-tuning, question answering, and text generation within the Chinese legal domain. The data covers active articles of the Chinese Criminal Law as of November 1, 2024, with expired articles removed to ensure up-to-date content.
The dataset adopts a format similar to SFT (Supervised Fine-tuning) datasets and includes the following three key fields:
* **instruction (Instruction):** Describes the task to be performed or the legal question posed. Defaults to "Please answer the following legal question."
* **input (Input):** Provides context information or specific case descriptions relevant to the instruction.
* **output (Output):** Provides legal answers or corresponding articles of the Criminal Law in response to the instruction and input.
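For illustration, a single record in this format might look like the following hypothetical sketch (the `input` and `output` values are placeholders, not actual dataset entries; only the default instruction is taken from the description above):

```python
# Hypothetical example record -- illustrative only, not taken from the dataset.
example = {
    "instruction": "请回答以下法律问题",  # default instruction: "Please answer the following legal question."
    "input": "<context or specific case description>",
    "output": "<legal answer or the relevant Criminal Law article>",
}
print(example["instruction"])
```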
|
helloelwin/viewdata | helloelwin | "2024-11-20T07:41:50Z" | 9 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T07:41:49Z" | ---
dataset_info:
features:
- name: question
dtype: string
- name: chosen
dtype: string
- name: rejected
dtype: string
- name: strength
dtype: int64
- name: acc
dtype: int64
- name: gt_answer
dtype: string
splits:
- name: train
num_bytes: 891080
num_examples: 638
download_size: 498950
dataset_size: 891080
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
oztiger/ChildCare | oztiger | "2024-11-20T08:47:33Z" | 9 | 0 | [
"license:lgpl-lr",
"region:us"
] | null | "2024-11-20T08:47:33Z" | ---
license: lgpl-lr
---
|
tim-lawson/sae-pythia-70m-deduped-x64-k32-tfm-layers-1-dists | tim-lawson | "2024-11-20T09:20:25Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T09:20:23Z" | ---
dataset_info:
features:
- name: latent
dtype: int64
- name: count
dtype: float64
- name: total
dtype: float64
- name: mean
dtype: float64
- name: layer_mean
dtype: float64
- name: layer_var
dtype: float64
- name: layer_std
dtype: float64
- name: counts
sequence: float64
- name: totals
sequence: float64
- name: means
sequence: float64
- name: probs
sequence: float64
splits:
- name: train
num_bytes: 8650752
num_examples: 32768
download_size: 4769122
dataset_size: 8650752
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
open-llm-leaderboard/icefog72__Ice0.39-19.11-RP-details | open-llm-leaderboard | "2024-11-20T09:37:02Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T09:34:03Z" | ---
pretty_name: Evaluation run of icefog72/Ice0.39-19.11-RP
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [icefog72/Ice0.39-19.11-RP](https://huggingface.co/icefog72/Ice0.39-19.11-RP)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/icefog72__Ice0.39-19.11-RP-details\"\
,\n\tname=\"icefog72__Ice0.39-19.11-RP__leaderboard_bbh_boolean_expressions\",\n\
\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-11-20T09-34-02.349047](https://huggingface.co/datasets/open-llm-leaderboard/icefog72__Ice0.39-19.11-RP-details/blob/main/icefog72__Ice0.39-19.11-RP/results_2024-11-20T09-34-02.349047.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"prompt_level_strict_acc,none\": 0.4177449168207024,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.02122341916161409,\n \"\
inst_level_loose_acc,none\": 0.579136690647482,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"exact_match,none\": 0.04909365558912387,\n \
\ \"exact_match_stderr,none\": 0.00586773226648738,\n \"acc_norm,none\"\
: 0.46997016474250874,\n \"acc_norm_stderr,none\": 0.005335847414879112,\n\
\ \"inst_level_strict_acc,none\": 0.5335731414868106,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"acc,none\": 0.3126662234042553,\n\
\ \"acc_stderr,none\": 0.00422642789157279,\n \"prompt_level_loose_acc,none\"\
: 0.46395563770794823,\n \"prompt_level_loose_acc_stderr,none\": 0.02146059282373674,\n\
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.5077243534108662,\n \"acc_norm_stderr,none\"\
: 0.006148294992940624,\n \"alias\": \" - leaderboard_bbh\"\n \
\ },\n \"leaderboard_bbh_boolean_expressions\": {\n \"alias\"\
: \" - leaderboard_bbh_boolean_expressions\",\n \"acc_norm,none\": 0.816,\n\
\ \"acc_norm_stderr,none\": 0.02455581299422255\n },\n \
\ \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.6042780748663101,\n \"acc_norm_stderr,none\"\
: 0.035855600715925424\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"alias\": \" - leaderboard_bbh_date_understanding\",\n \
\ \"acc_norm,none\": 0.404,\n \"acc_norm_stderr,none\": 0.03109668818482536\n\
\ },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\",\n \"acc_norm,none\": 0.684,\n\
\ \"acc_norm_stderr,none\": 0.02946265759857865\n },\n \
\ \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.576,\n \"acc_norm_stderr,none\":\
\ 0.03131803437491622\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\",\n \
\ \"acc_norm,none\": 0.476,\n \"acc_norm_stderr,none\": 0.03164968895968774\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \"\
\ - leaderboard_bbh_hyperbaton\",\n \"acc_norm,none\": 0.812,\n \
\ \"acc_norm_stderr,none\": 0.02476037772775051\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
,\n \"acc_norm,none\": 0.46,\n \"acc_norm_stderr,none\": 0.031584653891499004\n\
\ },\n \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\",\n\
\ \"acc_norm,none\": 0.428,\n \"acc_norm_stderr,none\": 0.031355968923772626\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n\
\ \"acc_norm,none\": 0.664,\n \"acc_norm_stderr,none\": 0.029933259094191533\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
alias\": \" - leaderboard_bbh_movie_recommendation\",\n \"acc_norm,none\"\
: 0.624,\n \"acc_norm_stderr,none\": 0.03069633626739458\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.556,\n \"acc_norm_stderr,none\":\
\ 0.03148684942554571\n },\n \"leaderboard_bbh_object_counting\":\
\ {\n \"alias\": \" - leaderboard_bbh_object_counting\",\n \
\ \"acc_norm,none\": 0.368,\n \"acc_norm_stderr,none\": 0.03056207062099311\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
alias\": \" - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\"\
: 0.4520547945205479,\n \"acc_norm_stderr,none\": 0.04133139887430771\n\
\ },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\",\n\
\ \"acc_norm,none\": 0.512,\n \"acc_norm_stderr,none\": 0.03167708558254714\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\",\n \"acc_norm,none\": 0.504,\n \
\ \"acc_norm_stderr,none\": 0.0316851985511992\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\
,\n \"acc_norm,none\": 0.444,\n \"acc_norm_stderr,none\":\
\ 0.03148684942554571\n },\n \"leaderboard_bbh_snarks\": {\n \
\ \"alias\": \" - leaderboard_bbh_snarks\",\n \"acc_norm,none\"\
: 0.6348314606741573,\n \"acc_norm_stderr,none\": 0.03619005678691264\n\
\ },\n \"leaderboard_bbh_sports_understanding\": {\n \"\
alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.828,\n \"acc_norm_stderr,none\": 0.02391551394448624\n },\n\
\ \"leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" -\
\ leaderboard_bbh_temporal_sequences\",\n \"acc_norm,none\": 0.264,\n\
\ \"acc_norm_stderr,none\": 0.027934518957690866\n },\n \
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \"\
alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\",\n \
\ \"acc_norm,none\": 0.184,\n \"acc_norm_stderr,none\": 0.02455581299422255\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.164,\n \"acc_norm_stderr,none\":\
\ 0.02346526100207671\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.288,\n \"acc_norm_stderr,none\":\
\ 0.028697004587398253\n },\n \"leaderboard_bbh_web_of_lies\": {\n\
\ \"alias\": \" - leaderboard_bbh_web_of_lies\",\n \"acc_norm,none\"\
: 0.476,\n \"acc_norm_stderr,none\": 0.03164968895968774\n },\n\
\ \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.3104026845637584,\n\
\ \"acc_norm_stderr,none\": 0.013415490417614214,\n \"alias\"\
: \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n\
\ \"alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\"\
: 0.29292929292929293,\n \"acc_norm_stderr,none\": 0.032424979581788145\n\
\ },\n \"leaderboard_gpqa_extended\": {\n \"alias\": \"\
\ - leaderboard_gpqa_extended\",\n \"acc_norm,none\": 0.315018315018315,\n\
\ \"acc_norm_stderr,none\": 0.019897986446530357\n },\n \
\ \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.3125,\n \"acc_norm_stderr,none\"\
: 0.021923384489444957\n },\n \"leaderboard_ifeval\": {\n \
\ \"alias\": \" - leaderboard_ifeval\",\n \"prompt_level_strict_acc,none\"\
: 0.4177449168207024,\n \"prompt_level_strict_acc_stderr,none\": 0.02122341916161409,\n\
\ \"inst_level_strict_acc,none\": 0.5335731414868106,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.46395563770794823,\n \"prompt_level_loose_acc_stderr,none\": 0.02146059282373674,\n\
\ \"inst_level_loose_acc,none\": 0.579136690647482,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\"\n },\n \"leaderboard_math_hard\": {\n \"exact_match,none\"\
: 0.04909365558912387,\n \"exact_match_stderr,none\": 0.00586773226648738,\n\
\ \"alias\": \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_algebra_hard\",\n \
\ \"exact_match,none\": 0.0781758957654723,\n \"exact_match_stderr,none\"\
: 0.01534616099049543\n },\n \"leaderboard_math_counting_and_prob_hard\"\
: {\n \"alias\": \" - leaderboard_math_counting_and_prob_hard\",\n \
\ \"exact_match,none\": 0.032520325203252036,\n \"exact_match_stderr,none\"\
: 0.016058998205879745\n },\n \"leaderboard_math_geometry_hard\":\
\ {\n \"alias\": \" - leaderboard_math_geometry_hard\",\n \
\ \"exact_match,none\": 0.007575757575757576,\n \"exact_match_stderr,none\"\
: 0.007575757575757577\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n\
\ \"exact_match,none\": 0.014285714285714285,\n \"exact_match_stderr,none\"\
: 0.0071043508939153165\n },\n \"leaderboard_math_num_theory_hard\"\
: {\n \"alias\": \" - leaderboard_math_num_theory_hard\",\n \
\ \"exact_match,none\": 0.045454545454545456,\n \"exact_match_stderr,none\"\
: 0.016839967582612466\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_prealgebra_hard\",\n \
\ \"exact_match,none\": 0.11398963730569948,\n \"exact_match_stderr,none\"\
: 0.022935144053919398\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"alias\": \" - leaderboard_math_precalculus_hard\",\n \
\ \"exact_match,none\": 0.022222222222222223,\n \"exact_match_stderr,none\"\
: 0.01273389971505968\n },\n \"leaderboard_mmlu_pro\": {\n \
\ \"alias\": \" - leaderboard_mmlu_pro\",\n \"acc,none\": 0.3126662234042553,\n\
\ \"acc_stderr,none\": 0.00422642789157279\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.43386243386243384,\n \"acc_norm_stderr,none\"\
: 0.01783000055737664,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"alias\": \"\
\ - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\": 0.544,\n\
\ \"acc_norm_stderr,none\": 0.031563285061213475\n },\n \
\ \"leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.3984375,\n \"acc_norm_stderr,none\"\
: 0.030658463859919664\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"alias\": \" - leaderboard_musr_team_allocation\",\n \
\ \"acc_norm,none\": 0.36,\n \"acc_norm_stderr,none\": 0.03041876402517494\n\
\ }\n },\n \"leaderboard\": {\n \"prompt_level_strict_acc,none\"\
: 0.4177449168207024,\n \"prompt_level_strict_acc_stderr,none\": 0.02122341916161409,\n\
\ \"inst_level_loose_acc,none\": 0.579136690647482,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"exact_match,none\": 0.04909365558912387,\n \"exact_match_stderr,none\"\
: 0.00586773226648738,\n \"acc_norm,none\": 0.46997016474250874,\n \
\ \"acc_norm_stderr,none\": 0.005335847414879112,\n \"inst_level_strict_acc,none\"\
: 0.5335731414868106,\n \"inst_level_strict_acc_stderr,none\": \"N/A\",\n\
\ \"acc,none\": 0.3126662234042553,\n \"acc_stderr,none\": 0.00422642789157279,\n\
\ \"prompt_level_loose_acc,none\": 0.46395563770794823,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.02146059282373674,\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.5077243534108662,\n \"acc_norm_stderr,none\"\
: 0.006148294992940624,\n \"alias\": \" - leaderboard_bbh\"\n },\n \
\ \"leaderboard_bbh_boolean_expressions\": {\n \"alias\": \" - leaderboard_bbh_boolean_expressions\"\
,\n \"acc_norm,none\": 0.816,\n \"acc_norm_stderr,none\": 0.02455581299422255\n\
\ },\n \"leaderboard_bbh_causal_judgement\": {\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
,\n \"acc_norm,none\": 0.6042780748663101,\n \"acc_norm_stderr,none\"\
: 0.035855600715925424\n },\n \"leaderboard_bbh_date_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_date_understanding\",\n \"acc_norm,none\"\
: 0.404,\n \"acc_norm_stderr,none\": 0.03109668818482536\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\
,\n \"acc_norm,none\": 0.684,\n \"acc_norm_stderr,none\": 0.02946265759857865\n\
\ },\n \"leaderboard_bbh_formal_fallacies\": {\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
,\n \"acc_norm,none\": 0.576,\n \"acc_norm_stderr,none\": 0.03131803437491622\n\
\ },\n \"leaderboard_bbh_geometric_shapes\": {\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
,\n \"acc_norm,none\": 0.476,\n \"acc_norm_stderr,none\": 0.03164968895968774\n\
\ },\n \"leaderboard_bbh_hyperbaton\": {\n \"alias\": \" - leaderboard_bbh_hyperbaton\"\
,\n \"acc_norm,none\": 0.812,\n \"acc_norm_stderr,none\": 0.02476037772775051\n\
\ },\n \"leaderboard_bbh_logical_deduction_five_objects\": {\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_five_objects\",\n \"acc_norm,none\"\
: 0.46,\n \"acc_norm_stderr,none\": 0.031584653891499004\n },\n \"\
leaderboard_bbh_logical_deduction_seven_objects\": {\n \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\
,\n \"acc_norm,none\": 0.428,\n \"acc_norm_stderr,none\": 0.031355968923772626\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \"\
alias\": \" - leaderboard_bbh_logical_deduction_three_objects\",\n \"acc_norm,none\"\
: 0.664,\n \"acc_norm_stderr,none\": 0.029933259094191533\n },\n \"\
leaderboard_bbh_movie_recommendation\": {\n \"alias\": \" - leaderboard_bbh_movie_recommendation\"\
,\n \"acc_norm,none\": 0.624,\n \"acc_norm_stderr,none\": 0.03069633626739458\n\
\ },\n \"leaderboard_bbh_navigate\": {\n \"alias\": \" - leaderboard_bbh_navigate\"\
,\n \"acc_norm,none\": 0.556,\n \"acc_norm_stderr,none\": 0.03148684942554571\n\
\ },\n \"leaderboard_bbh_object_counting\": {\n \"alias\": \" - leaderboard_bbh_object_counting\"\
,\n \"acc_norm,none\": 0.368,\n \"acc_norm_stderr,none\": 0.03056207062099311\n\
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"alias\": \" \
\ - leaderboard_bbh_penguins_in_a_table\",\n \"acc_norm,none\": 0.4520547945205479,\n\
\ \"acc_norm_stderr,none\": 0.04133139887430771\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\"\
: {\n \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\
,\n \"acc_norm,none\": 0.512,\n \"acc_norm_stderr,none\": 0.03167708558254714\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"alias\": \" - leaderboard_bbh_ruin_names\"\
,\n \"acc_norm,none\": 0.504,\n \"acc_norm_stderr,none\": 0.0316851985511992\n\
\ },\n \"leaderboard_bbh_salient_translation_error_detection\": {\n \
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\",\n \
\ \"acc_norm,none\": 0.444,\n \"acc_norm_stderr,none\": 0.03148684942554571\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"alias\": \" - leaderboard_bbh_snarks\"\
,\n \"acc_norm,none\": 0.6348314606741573,\n \"acc_norm_stderr,none\"\
: 0.03619005678691264\n },\n \"leaderboard_bbh_sports_understanding\": {\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\",\n \"acc_norm,none\"\
: 0.828,\n \"acc_norm_stderr,none\": 0.02391551394448624\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
,\n \"acc_norm,none\": 0.264,\n \"acc_norm_stderr,none\": 0.027934518957690866\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
,\n \"acc_norm,none\": 0.184,\n \"acc_norm_stderr,none\": 0.02455581299422255\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
,\n \"acc_norm,none\": 0.164,\n \"acc_norm_stderr,none\": 0.02346526100207671\n\
\ },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
,\n \"acc_norm,none\": 0.288,\n \"acc_norm_stderr,none\": 0.028697004587398253\n\
\ },\n \"leaderboard_bbh_web_of_lies\": {\n \"alias\": \" - leaderboard_bbh_web_of_lies\"\
,\n \"acc_norm,none\": 0.476,\n \"acc_norm_stderr,none\": 0.03164968895968774\n\
\ },\n \"leaderboard_gpqa\": {\n \"acc_norm,none\": 0.3104026845637584,\n\
\ \"acc_norm_stderr,none\": 0.013415490417614214,\n \"alias\": \"\
\ - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\": {\n \"\
alias\": \" - leaderboard_gpqa_diamond\",\n \"acc_norm,none\": 0.29292929292929293,\n\
\ \"acc_norm_stderr,none\": 0.032424979581788145\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"alias\": \" - leaderboard_gpqa_extended\",\n \"acc_norm,none\"\
: 0.315018315018315,\n \"acc_norm_stderr,none\": 0.019897986446530357\n \
\ },\n \"leaderboard_gpqa_main\": {\n \"alias\": \" - leaderboard_gpqa_main\"\
,\n \"acc_norm,none\": 0.3125,\n \"acc_norm_stderr,none\": 0.021923384489444957\n\
\ },\n \"leaderboard_ifeval\": {\n \"alias\": \" - leaderboard_ifeval\"\
,\n \"prompt_level_strict_acc,none\": 0.4177449168207024,\n \"prompt_level_strict_acc_stderr,none\"\
: 0.02122341916161409,\n \"inst_level_strict_acc,none\": 0.5335731414868106,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.46395563770794823,\n \"prompt_level_loose_acc_stderr,none\": 0.02146059282373674,\n\
\ \"inst_level_loose_acc,none\": 0.579136690647482,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\"\n },\n \"leaderboard_math_hard\": {\n \"exact_match,none\"\
: 0.04909365558912387,\n \"exact_match_stderr,none\": 0.00586773226648738,\n\
\ \"alias\": \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_algebra_hard\",\n \"exact_match,none\"\
: 0.0781758957654723,\n \"exact_match_stderr,none\": 0.01534616099049543\n\
\ },\n \"leaderboard_math_counting_and_prob_hard\": {\n \"alias\":\
\ \" - leaderboard_math_counting_and_prob_hard\",\n \"exact_match,none\"\
: 0.032520325203252036,\n \"exact_match_stderr,none\": 0.016058998205879745\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"alias\": \" - leaderboard_math_geometry_hard\"\
,\n \"exact_match,none\": 0.007575757575757576,\n \"exact_match_stderr,none\"\
: 0.007575757575757577\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\",\n \
\ \"exact_match,none\": 0.014285714285714285,\n \"exact_match_stderr,none\"\
: 0.0071043508939153165\n },\n \"leaderboard_math_num_theory_hard\": {\n \
\ \"alias\": \" - leaderboard_math_num_theory_hard\",\n \"exact_match,none\"\
: 0.045454545454545456,\n \"exact_match_stderr,none\": 0.016839967582612466\n\
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
,\n \"exact_match,none\": 0.11398963730569948,\n \"exact_match_stderr,none\"\
: 0.022935144053919398\n },\n \"leaderboard_math_precalculus_hard\": {\n \
\ \"alias\": \" - leaderboard_math_precalculus_hard\",\n \"exact_match,none\"\
: 0.022222222222222223,\n \"exact_match_stderr,none\": 0.01273389971505968\n\
\ },\n \"leaderboard_mmlu_pro\": {\n \"alias\": \" - leaderboard_mmlu_pro\"\
,\n \"acc,none\": 0.3126662234042553,\n \"acc_stderr,none\": 0.00422642789157279\n\
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.43386243386243384,\n\
\ \"acc_norm_stderr,none\": 0.01783000055737664,\n \"alias\": \" -\
\ leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\": {\n \
\ \"alias\": \" - leaderboard_musr_murder_mysteries\",\n \"acc_norm,none\"\
: 0.544,\n \"acc_norm_stderr,none\": 0.031563285061213475\n },\n \"\
leaderboard_musr_object_placements\": {\n \"alias\": \" - leaderboard_musr_object_placements\"\
,\n \"acc_norm,none\": 0.3984375,\n \"acc_norm_stderr,none\": 0.030658463859919664\n\
\ },\n \"leaderboard_musr_team_allocation\": {\n \"alias\": \" - leaderboard_musr_team_allocation\"\
,\n \"acc_norm,none\": 0.36,\n \"acc_norm_stderr,none\": 0.03041876402517494\n\
\ }\n}\n```"
repo_url: https://huggingface.co/icefog72/Ice0.39-19.11-RP
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_bbh_date_understanding
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_bbh_navigate
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_bbh_object_counting
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_bbh_ruin_names
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_bbh_snarks
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_gpqa_diamond
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_gpqa_extended
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_gpqa_main
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_ifeval
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_ifeval_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_math_algebra_hard
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_math_geometry_hard
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_math_num_theory_hard
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_math_precalculus_hard
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_mmlu_pro
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_musr_object_placements
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-11-20T09-34-02.349047.jsonl'
- config_name: icefog72__Ice0.39-19.11-RP__leaderboard_musr_team_allocation
data_files:
- split: 2024_11_20T09_34_02.349047
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T09-34-02.349047.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-11-20T09-34-02.349047.jsonl'
---
# Dataset Card for Evaluation run of icefog72/Ice0.39-19.11-RP
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [icefog72/Ice0.39-19.11-RP](https://huggingface.co/icefog72/Ice0.39-19.11-RP).
The dataset is composed of 38 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the most recent results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/icefog72__Ice0.39-19.11-RP-details",
name="icefog72__Ice0.39-19.11-RP__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
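The aggregated results can be loaded the same way; the configuration name below is an assumption based on the repository's naming scheme, so check the configurations listed in the repository if it does not match:
```python
from datasets import load_dataset

# Assumed configuration name for the aggregated results; verify it against
# the configurations listed in the repository before relying on it.
results = load_dataset(
    "open-llm-leaderboard/icefog72__Ice0.39-19.11-RP-details",
    name="icefog72__Ice0.39-19.11-RP__results",
    split="latest"
)
```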
## Latest results
These are the [latest results from run 2024-11-20T09-34-02.349047](https://huggingface.co/datasets/open-llm-leaderboard/icefog72__Ice0.39-19.11-RP-details/blob/main/icefog72__Ice0.39-19.11-RP/results_2024-11-20T09-34-02.349047.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and in the "latest" split of each eval):
```python
{
"all": {
"leaderboard": {
"prompt_level_strict_acc,none": 0.4177449168207024,
"prompt_level_strict_acc_stderr,none": 0.02122341916161409,
"inst_level_loose_acc,none": 0.579136690647482,
"inst_level_loose_acc_stderr,none": "N/A",
"exact_match,none": 0.04909365558912387,
"exact_match_stderr,none": 0.00586773226648738,
"acc_norm,none": 0.46997016474250874,
"acc_norm_stderr,none": 0.005335847414879112,
"inst_level_strict_acc,none": 0.5335731414868106,
"inst_level_strict_acc_stderr,none": "N/A",
"acc,none": 0.3126662234042553,
"acc_stderr,none": 0.00422642789157279,
"prompt_level_loose_acc,none": 0.46395563770794823,
"prompt_level_loose_acc_stderr,none": 0.02146059282373674,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.5077243534108662,
"acc_norm_stderr,none": 0.006148294992940624,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.816,
"acc_norm_stderr,none": 0.02455581299422255
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.6042780748663101,
"acc_norm_stderr,none": 0.035855600715925424
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.404,
"acc_norm_stderr,none": 0.03109668818482536
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.684,
"acc_norm_stderr,none": 0.02946265759857865
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.576,
"acc_norm_stderr,none": 0.03131803437491622
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.476,
"acc_norm_stderr,none": 0.03164968895968774
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.812,
"acc_norm_stderr,none": 0.02476037772775051
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.46,
"acc_norm_stderr,none": 0.031584653891499004
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.428,
"acc_norm_stderr,none": 0.031355968923772626
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.664,
"acc_norm_stderr,none": 0.029933259094191533
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.624,
"acc_norm_stderr,none": 0.03069633626739458
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.556,
"acc_norm_stderr,none": 0.03148684942554571
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.368,
"acc_norm_stderr,none": 0.03056207062099311
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.4520547945205479,
"acc_norm_stderr,none": 0.04133139887430771
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.512,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.504,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.444,
"acc_norm_stderr,none": 0.03148684942554571
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.6348314606741573,
"acc_norm_stderr,none": 0.03619005678691264
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.828,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.264,
"acc_norm_stderr,none": 0.027934518957690866
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.184,
"acc_norm_stderr,none": 0.02455581299422255
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.164,
"acc_norm_stderr,none": 0.02346526100207671
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.288,
"acc_norm_stderr,none": 0.028697004587398253
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.476,
"acc_norm_stderr,none": 0.03164968895968774
},
"leaderboard_gpqa": {
"acc_norm,none": 0.3104026845637584,
"acc_norm_stderr,none": 0.013415490417614214,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.29292929292929293,
"acc_norm_stderr,none": 0.032424979581788145
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.315018315018315,
"acc_norm_stderr,none": 0.019897986446530357
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.3125,
"acc_norm_stderr,none": 0.021923384489444957
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.4177449168207024,
"prompt_level_strict_acc_stderr,none": 0.02122341916161409,
"inst_level_strict_acc,none": 0.5335731414868106,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.46395563770794823,
"prompt_level_loose_acc_stderr,none": 0.02146059282373674,
"inst_level_loose_acc,none": 0.579136690647482,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.04909365558912387,
"exact_match_stderr,none": 0.00586773226648738,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.0781758957654723,
"exact_match_stderr,none": 0.01534616099049543
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.032520325203252036,
"exact_match_stderr,none": 0.016058998205879745
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.007575757575757576,
"exact_match_stderr,none": 0.007575757575757577
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.014285714285714285,
"exact_match_stderr,none": 0.0071043508939153165
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.045454545454545456,
"exact_match_stderr,none": 0.016839967582612466
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.11398963730569948,
"exact_match_stderr,none": 0.022935144053919398
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.022222222222222223,
"exact_match_stderr,none": 0.01273389971505968
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.3126662234042553,
"acc_stderr,none": 0.00422642789157279
},
"leaderboard_musr": {
"acc_norm,none": 0.43386243386243384,
"acc_norm_stderr,none": 0.01783000055737664,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.3984375,
"acc_norm_stderr,none": 0.030658463859919664
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.36,
"acc_norm_stderr,none": 0.03041876402517494
}
},
"leaderboard": {
"prompt_level_strict_acc,none": 0.4177449168207024,
"prompt_level_strict_acc_stderr,none": 0.02122341916161409,
"inst_level_loose_acc,none": 0.579136690647482,
"inst_level_loose_acc_stderr,none": "N/A",
"exact_match,none": 0.04909365558912387,
"exact_match_stderr,none": 0.00586773226648738,
"acc_norm,none": 0.46997016474250874,
"acc_norm_stderr,none": 0.005335847414879112,
"inst_level_strict_acc,none": 0.5335731414868106,
"inst_level_strict_acc_stderr,none": "N/A",
"acc,none": 0.3126662234042553,
"acc_stderr,none": 0.00422642789157279,
"prompt_level_loose_acc,none": 0.46395563770794823,
"prompt_level_loose_acc_stderr,none": 0.02146059282373674,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.5077243534108662,
"acc_norm_stderr,none": 0.006148294992940624,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"alias": " - leaderboard_bbh_boolean_expressions",
"acc_norm,none": 0.816,
"acc_norm_stderr,none": 0.02455581299422255
},
"leaderboard_bbh_causal_judgement": {
"alias": " - leaderboard_bbh_causal_judgement",
"acc_norm,none": 0.6042780748663101,
"acc_norm_stderr,none": 0.035855600715925424
},
"leaderboard_bbh_date_understanding": {
"alias": " - leaderboard_bbh_date_understanding",
"acc_norm,none": 0.404,
"acc_norm_stderr,none": 0.03109668818482536
},
"leaderboard_bbh_disambiguation_qa": {
"alias": " - leaderboard_bbh_disambiguation_qa",
"acc_norm,none": 0.684,
"acc_norm_stderr,none": 0.02946265759857865
},
"leaderboard_bbh_formal_fallacies": {
"alias": " - leaderboard_bbh_formal_fallacies",
"acc_norm,none": 0.576,
"acc_norm_stderr,none": 0.03131803437491622
},
"leaderboard_bbh_geometric_shapes": {
"alias": " - leaderboard_bbh_geometric_shapes",
"acc_norm,none": 0.476,
"acc_norm_stderr,none": 0.03164968895968774
},
"leaderboard_bbh_hyperbaton": {
"alias": " - leaderboard_bbh_hyperbaton",
"acc_norm,none": 0.812,
"acc_norm_stderr,none": 0.02476037772775051
},
"leaderboard_bbh_logical_deduction_five_objects": {
"alias": " - leaderboard_bbh_logical_deduction_five_objects",
"acc_norm,none": 0.46,
"acc_norm_stderr,none": 0.031584653891499004
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"alias": " - leaderboard_bbh_logical_deduction_seven_objects",
"acc_norm,none": 0.428,
"acc_norm_stderr,none": 0.031355968923772626
},
"leaderboard_bbh_logical_deduction_three_objects": {
"alias": " - leaderboard_bbh_logical_deduction_three_objects",
"acc_norm,none": 0.664,
"acc_norm_stderr,none": 0.029933259094191533
},
"leaderboard_bbh_movie_recommendation": {
"alias": " - leaderboard_bbh_movie_recommendation",
"acc_norm,none": 0.624,
"acc_norm_stderr,none": 0.03069633626739458
},
"leaderboard_bbh_navigate": {
"alias": " - leaderboard_bbh_navigate",
"acc_norm,none": 0.556,
"acc_norm_stderr,none": 0.03148684942554571
},
"leaderboard_bbh_object_counting": {
"alias": " - leaderboard_bbh_object_counting",
"acc_norm,none": 0.368,
"acc_norm_stderr,none": 0.03056207062099311
},
"leaderboard_bbh_penguins_in_a_table": {
"alias": " - leaderboard_bbh_penguins_in_a_table",
"acc_norm,none": 0.4520547945205479,
"acc_norm_stderr,none": 0.04133139887430771
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"alias": " - leaderboard_bbh_reasoning_about_colored_objects",
"acc_norm,none": 0.512,
"acc_norm_stderr,none": 0.03167708558254714
},
"leaderboard_bbh_ruin_names": {
"alias": " - leaderboard_bbh_ruin_names",
"acc_norm,none": 0.504,
"acc_norm_stderr,none": 0.0316851985511992
},
"leaderboard_bbh_salient_translation_error_detection": {
"alias": " - leaderboard_bbh_salient_translation_error_detection",
"acc_norm,none": 0.444,
"acc_norm_stderr,none": 0.03148684942554571
},
"leaderboard_bbh_snarks": {
"alias": " - leaderboard_bbh_snarks",
"acc_norm,none": 0.6348314606741573,
"acc_norm_stderr,none": 0.03619005678691264
},
"leaderboard_bbh_sports_understanding": {
"alias": " - leaderboard_bbh_sports_understanding",
"acc_norm,none": 0.828,
"acc_norm_stderr,none": 0.02391551394448624
},
"leaderboard_bbh_temporal_sequences": {
"alias": " - leaderboard_bbh_temporal_sequences",
"acc_norm,none": 0.264,
"acc_norm_stderr,none": 0.027934518957690866
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects",
"acc_norm,none": 0.184,
"acc_norm_stderr,none": 0.02455581299422255
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects",
"acc_norm,none": 0.164,
"acc_norm_stderr,none": 0.02346526100207671
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects",
"acc_norm,none": 0.288,
"acc_norm_stderr,none": 0.028697004587398253
},
"leaderboard_bbh_web_of_lies": {
"alias": " - leaderboard_bbh_web_of_lies",
"acc_norm,none": 0.476,
"acc_norm_stderr,none": 0.03164968895968774
},
"leaderboard_gpqa": {
"acc_norm,none": 0.3104026845637584,
"acc_norm_stderr,none": 0.013415490417614214,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"alias": " - leaderboard_gpqa_diamond",
"acc_norm,none": 0.29292929292929293,
"acc_norm_stderr,none": 0.032424979581788145
},
"leaderboard_gpqa_extended": {
"alias": " - leaderboard_gpqa_extended",
"acc_norm,none": 0.315018315018315,
"acc_norm_stderr,none": 0.019897986446530357
},
"leaderboard_gpqa_main": {
"alias": " - leaderboard_gpqa_main",
"acc_norm,none": 0.3125,
"acc_norm_stderr,none": 0.021923384489444957
},
"leaderboard_ifeval": {
"alias": " - leaderboard_ifeval",
"prompt_level_strict_acc,none": 0.4177449168207024,
"prompt_level_strict_acc_stderr,none": 0.02122341916161409,
"inst_level_strict_acc,none": 0.5335731414868106,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.46395563770794823,
"prompt_level_loose_acc_stderr,none": 0.02146059282373674,
"inst_level_loose_acc,none": 0.579136690647482,
"inst_level_loose_acc_stderr,none": "N/A"
},
"leaderboard_math_hard": {
"exact_match,none": 0.04909365558912387,
"exact_match_stderr,none": 0.00586773226648738,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"alias": " - leaderboard_math_algebra_hard",
"exact_match,none": 0.0781758957654723,
"exact_match_stderr,none": 0.01534616099049543
},
"leaderboard_math_counting_and_prob_hard": {
"alias": " - leaderboard_math_counting_and_prob_hard",
"exact_match,none": 0.032520325203252036,
"exact_match_stderr,none": 0.016058998205879745
},
"leaderboard_math_geometry_hard": {
"alias": " - leaderboard_math_geometry_hard",
"exact_match,none": 0.007575757575757576,
"exact_match_stderr,none": 0.007575757575757577
},
"leaderboard_math_intermediate_algebra_hard": {
"alias": " - leaderboard_math_intermediate_algebra_hard",
"exact_match,none": 0.014285714285714285,
"exact_match_stderr,none": 0.0071043508939153165
},
"leaderboard_math_num_theory_hard": {
"alias": " - leaderboard_math_num_theory_hard",
"exact_match,none": 0.045454545454545456,
"exact_match_stderr,none": 0.016839967582612466
},
"leaderboard_math_prealgebra_hard": {
"alias": " - leaderboard_math_prealgebra_hard",
"exact_match,none": 0.11398963730569948,
"exact_match_stderr,none": 0.022935144053919398
},
"leaderboard_math_precalculus_hard": {
"alias": " - leaderboard_math_precalculus_hard",
"exact_match,none": 0.022222222222222223,
"exact_match_stderr,none": 0.01273389971505968
},
"leaderboard_mmlu_pro": {
"alias": " - leaderboard_mmlu_pro",
"acc,none": 0.3126662234042553,
"acc_stderr,none": 0.00422642789157279
},
"leaderboard_musr": {
"acc_norm,none": 0.43386243386243384,
"acc_norm_stderr,none": 0.01783000055737664,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"alias": " - leaderboard_musr_murder_mysteries",
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.031563285061213475
},
"leaderboard_musr_object_placements": {
"alias": " - leaderboard_musr_object_placements",
"acc_norm,none": 0.3984375,
"acc_norm_stderr,none": 0.030658463859919664
},
"leaderboard_musr_team_allocation": {
"alias": " - leaderboard_musr_team_allocation",
"acc_norm,none": 0.36,
"acc_norm_stderr,none": 0.03041876402517494
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
proteinglm/temperature_stability | proteinglm | "2024-11-20T10:13:28Z" | 9 | 0 | [
"task_categories:text-classification",
"license:apache-2.0",
"size_categories:100K<n<1M",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"arxiv:2401.06199",
"region:us",
"chemistry",
"biology"
] | [
"text-classification"
] | "2024-11-20T09:52:55Z" | ---
dataset_info:
features:
- name: seq
dtype: string
- name: label
dtype: int64
splits:
- name: train
num_bytes: 88951983
num_examples: 283057
- name: valid
num_bytes: 19213838
num_examples: 62973
- name: test
num_bytes: 22317993
num_examples: 73205
download_size: 127753697
dataset_size: 130483814
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: valid
path: data/valid-*
- split: test
path: data/test-*
license: apache-2.0
task_categories:
- text-classification
tags:
- chemistry
- biology
size_categories:
- 100K<n<1M
---
# Dataset Card for Temperature Stability Dataset
### Dataset Summary
The accurate prediction of protein thermal stability has far-reaching implications in both academic and industrial settings. This task aims to predict a protein’s capacity to preserve its structural stability at a temperature of 65 degrees Celsius.
## Dataset Structure
### Data Instances
For each instance, there is a string representing the protein sequence and an integer label indicating whether the protein can maintain its structural stability at a temperature of 65 degrees Celsius. See the [temperature stability dataset viewer](https://huggingface.co/datasets/Bo1015/temperature_stability/viewer) to explore more examples.
```
{'seq':'MEHVIDNFDNIDKCLKCGKPIKVVKLKYIKKKIENIPNSHLINFKYCSKCKRENVIENL',
'label':1}
```
The average length of the protein sequences (`seq`) is provided below:
| Feature | Mean Count |
| ---------- | ---------------- |
| seq | 300 |
### Data Fields
- `seq`: a string containing the protein sequence
- `label`: an integer label indicating the structural stability of each sequence.
### Data Splits
The temperature stability dataset has 3 splits: _train_, _valid_, and _test_. Below are the statistics of the dataset.
| Dataset Split | Number of Instances in Split |
| ------------- | ------------------------------------------- |
| Train | 283,057 |
| Valid | 62,973 |
| Test | 73,205 |
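A minimal sketch of loading these splits with the `datasets` library is shown below; the repository id is assumed from this card (the viewer link above also references `Bo1015/temperature_stability`), so adjust it if needed.
```python
from datasets import load_dataset

# Assumed repository id; swap in "Bo1015/temperature_stability" if the
# dataset is hosted under that namespace instead.
ds = load_dataset("proteinglm/temperature_stability")

train, valid, test = ds["train"], ds["valid"], ds["test"]
print(len(train), len(valid), len(test))        # expected: 283057 62973 73205
print(train[0]["seq"][:30], train[0]["label"])  # sequence prefix and its stability label
```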
### Source Data
#### Initial Data Collection and Normalization
We adapted the dataset strategy from [TemStaPro](https://academic.oup.com/bioinformatics/article/40/4/btae157/7632735).
### Licensing Information
The dataset is released under the [Apache-2.0 License](http://www.apache.org/licenses/LICENSE-2.0).
### Citation
If you find our work useful, please consider citing the following paper:
```
@misc{chen2024xtrimopglm,
title={xTrimoPGLM: unified 100B-scale pre-trained transformer for deciphering the language of protein},
author={Chen, Bo and Cheng, Xingyi and Li, Pan and Geng, Yangli-ao and Gong, Jing and Li, Shen and Bei, Zhilei and Tan, Xu and Wang, Boyan and Zeng, Xin and others},
year={2024},
eprint={2401.06199},
archivePrefix={arXiv},
primaryClass={cs.CL},
note={arXiv preprint arXiv:2401.06199}
}
``` |
tim-lawson/sae-pythia-160m-deduped-x64-k32-tfm-layers-1-dists | tim-lawson | "2024-11-20T10:05:46Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T10:05:44Z" | ---
dataset_info:
features:
- name: latent
dtype: int64
- name: count
dtype: float64
- name: total
dtype: float64
- name: mean
dtype: float64
- name: layer_mean
dtype: float64
- name: layer_var
dtype: float64
- name: layer_std
dtype: float64
- name: counts
sequence: float64
- name: totals
sequence: float64
- name: means
sequence: float64
- name: probs
sequence: float64
splits:
- name: train
num_bytes: 22413312
num_examples: 49152
download_size: 13157094
dataset_size: 22413312
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
tim-lawson/sae-pythia-160m-deduped-x64-k32-tfm-layers-4-dists | tim-lawson | "2024-11-20T10:05:50Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T10:05:48Z" | ---
dataset_info:
features:
- name: latent
dtype: int64
- name: count
dtype: float64
- name: total
dtype: float64
- name: mean
dtype: float64
- name: layer_mean
dtype: float64
- name: layer_var
dtype: float64
- name: layer_std
dtype: float64
- name: counts
sequence: float64
- name: totals
sequence: float64
- name: means
sequence: float64
- name: probs
sequence: float64
splits:
- name: train
num_bytes: 22413312
num_examples: 49152
download_size: 11940160
dataset_size: 22413312
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
tim-lawson/sae-pythia-160m-deduped-x64-k32-tfm-layers-3-dists | tim-lawson | "2024-11-20T10:05:53Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T10:05:51Z" | ---
dataset_info:
features:
- name: latent
dtype: int64
- name: count
dtype: float64
- name: total
dtype: float64
- name: mean
dtype: float64
- name: layer_mean
dtype: float64
- name: layer_var
dtype: float64
- name: layer_std
dtype: float64
- name: counts
sequence: float64
- name: totals
sequence: float64
- name: means
sequence: float64
- name: probs
sequence: float64
splits:
- name: train
num_bytes: 22413312
num_examples: 49152
download_size: 13521098
dataset_size: 22413312
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
tim-lawson/sae-pythia-160m-deduped-x64-k32-tfm-layers-0-dists | tim-lawson | "2024-11-20T10:05:54Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T10:05:52Z" | ---
dataset_info:
features:
- name: latent
dtype: int64
- name: count
dtype: float64
- name: total
dtype: float64
- name: mean
dtype: float64
- name: layer_mean
dtype: float64
- name: layer_var
dtype: float64
- name: layer_std
dtype: float64
- name: counts
sequence: float64
- name: totals
sequence: float64
- name: means
sequence: float64
- name: probs
sequence: float64
splits:
- name: train
num_bytes: 22413312
num_examples: 49152
download_size: 13838670
dataset_size: 22413312
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
tim-lawson/sae-pythia-160m-deduped-x64-k32-tfm-layers-2-dists | tim-lawson | "2024-11-20T10:06:02Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T10:05:59Z" | ---
dataset_info:
features:
- name: latent
dtype: int64
- name: count
dtype: float64
- name: total
dtype: float64
- name: mean
dtype: float64
- name: layer_mean
dtype: float64
- name: layer_var
dtype: float64
- name: layer_std
dtype: float64
- name: counts
sequence: float64
- name: totals
sequence: float64
- name: means
sequence: float64
- name: probs
sequence: float64
splits:
- name: train
num_bytes: 22413312
num_examples: 49152
download_size: 13049371
dataset_size: 22413312
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
tim-lawson/sae-pythia-160m-deduped-x64-k32-tfm-layers-6-dists | tim-lawson | "2024-11-20T10:22:41Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T10:22:39Z" | ---
dataset_info:
features:
- name: latent
dtype: int64
- name: count
dtype: float64
- name: total
dtype: float64
- name: mean
dtype: float64
- name: layer_mean
dtype: float64
- name: layer_var
dtype: float64
- name: layer_std
dtype: float64
- name: counts
sequence: float64
- name: totals
sequence: float64
- name: means
sequence: float64
- name: probs
sequence: float64
splits:
- name: train
num_bytes: 22413312
num_examples: 49152
download_size: 10518434
dataset_size: 22413312
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
tim-lawson/sae-pythia-160m-deduped-x64-k32-tfm-layers-5-dists | tim-lawson | "2024-11-20T10:22:49Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T10:22:47Z" | ---
dataset_info:
features:
- name: latent
dtype: int64
- name: count
dtype: float64
- name: total
dtype: float64
- name: mean
dtype: float64
- name: layer_mean
dtype: float64
- name: layer_var
dtype: float64
- name: layer_std
dtype: float64
- name: counts
sequence: float64
- name: totals
sequence: float64
- name: means
sequence: float64
- name: probs
sequence: float64
splits:
- name: train
num_bytes: 22413312
num_examples: 49152
download_size: 11441780
dataset_size: 22413312
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
tim-lawson/sae-pythia-160m-deduped-x64-k32-tfm-layers-7-dists | tim-lawson | "2024-11-20T10:22:52Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T10:22:48Z" | ---
dataset_info:
features:
- name: latent
dtype: int64
- name: count
dtype: float64
- name: total
dtype: float64
- name: mean
dtype: float64
- name: layer_mean
dtype: float64
- name: layer_var
dtype: float64
- name: layer_std
dtype: float64
- name: counts
sequence: float64
- name: totals
sequence: float64
- name: means
sequence: float64
- name: probs
sequence: float64
splits:
- name: train
num_bytes: 22413312
num_examples: 49152
download_size: 9511873
dataset_size: 22413312
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
tim-lawson/sae-pythia-160m-deduped-x64-k32-tfm-layers-8-dists | tim-lawson | "2024-11-20T10:22:57Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T10:22:53Z" | ---
dataset_info:
features:
- name: latent
dtype: int64
- name: count
dtype: float64
- name: total
dtype: float64
- name: mean
dtype: float64
- name: layer_mean
dtype: float64
- name: layer_var
dtype: float64
- name: layer_std
dtype: float64
- name: counts
sequence: float64
- name: totals
sequence: float64
- name: means
sequence: float64
- name: probs
sequence: float64
splits:
- name: train
num_bytes: 22413312
num_examples: 49152
download_size: 10317298
dataset_size: 22413312
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
tim-lawson/sae-pythia-160m-deduped-x64-k32-tfm-layers-9-dists | tim-lawson | "2024-11-20T10:22:57Z" | 9 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T10:22:54Z" | ---
dataset_info:
features:
- name: latent
dtype: int64
- name: count
dtype: float64
- name: total
dtype: float64
- name: mean
dtype: float64
- name: layer_mean
dtype: float64
- name: layer_var
dtype: float64
- name: layer_std
dtype: float64
- name: counts
sequence: float64
- name: totals
sequence: float64
- name: means
sequence: float64
- name: probs
sequence: float64
splits:
- name: train
num_bytes: 22413312
num_examples: 49152
download_size: 14521317
dataset_size: 22413312
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
self-generate/ds_chat_pos_reflct_adamw_iter2_sppo_hard_new_cn_mining_oj_iter2-binarized | self-generate | "2024-11-20T10:24:49Z" | 9 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T10:24:47Z" | ---
dataset_info:
features:
- name: prompt
dtype: string
- name: chosen
dtype: string
- name: rejected
dtype: string
- name: rejected_traceback
dtype: string
- name: chosen_probs
dtype: float64
- name: chosen_probs_win
dtype: float64
- name: chosen_probs_lose
dtype: float64
splits:
- name: train
num_bytes: 7433151
num_examples: 3020
download_size: 3171858
dataset_size: 7433151
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "ds_chat_pos_reflct_adamw_iter2_sppo_hard_new_cn_mining_oj_iter2-binarized"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
ganga4364/AB_benchmark | ganga4364 | "2024-11-20T10:38:45Z" | 9 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T10:38:43Z" | ---
dataset_info:
features:
- name: file_name
dtype: string
- name: uni
dtype: string
- name: wylie
dtype: string
- name: url
dtype: string
- name: dept
dtype: string
- name: grade
dtype: int64
- name: char_len
dtype: int64
- name: audio_len
dtype: float64
- name: Original ID
dtype: string
- name: Collection
dtype: string
- name: Book Title
dtype: string
- name: Category
dtype: string
- name: Recorder
dtype: string
- name: Music
dtype: bool
- name: Name
dtype: string
- name: Gender
dtype: string
- name: Age
dtype: float64
- name: place
dtype: string
- name: livelihood
dtype: string
- name: birh place
dtype: string
- name: social/marital status
dtype: string
- name: education level
dtype: string
splits:
- name: Name
num_bytes: 1204577
num_examples: 1607
- name: Gender
num_bytes: 1352912
num_examples: 2000
- name: Age
num_bytes: 1139332
num_examples: 1680
- name: place
num_bytes: 1245527
num_examples: 1809
- name: Music
num_bytes: 1342845
num_examples: 2000
download_size: 1968630
dataset_size: 6285193
configs:
- config_name: default
data_files:
- split: Name
path: data/Name-*
- split: Gender
path: data/Gender-*
- split: Age
path: data/Age-*
- split: place
path: data/place-*
- split: Music
path: data/Music-*
---
|
PocketDoc/Dans-Personamaxx-Aesir | PocketDoc | "2024-11-22T11:43:14Z" | 9 | 0 | [
"region:us",
"not-for-all-audiences"
] | null | "2024-11-20T11:04:52Z" | ---
tags:
- not-for-all-audiences
--- |
lewtun/bon-prm-serverless-batched | lewtun | "2024-11-20T11:34:35Z" | 9 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T11:17:48Z" | ---
dataset_info:
- config_name: HuggingFaceH4_MATH-500--agg_strategy-last--T-0.0--top_p-1.0--n-1--max_tokens-2048
features:
- name: problem
dtype: string
- name: solution
dtype: string
- name: answer
dtype: string
- name: subject
dtype: string
- name: level
dtype: int64
- name: unique_id
dtype: string
- name: completions
sequence: string
- name: scores
sequence:
sequence: float64
- name: pred
dtype: string
- name: completion_tokens
dtype: int64
- name: agg_scores
sequence: float64
- name: pred_weighted@1
dtype: string
- name: pred_maj@1
dtype: string
- name: pred_naive@1
dtype: string
splits:
- name: train
num_bytes: 203042
num_examples: 48
download_size: 101483
dataset_size: 203042
- config_name: HuggingFaceH4_MATH-500--agg_strategy-last--T-0.0--top_p-1.0--n-1--max_tokens-2048--bsz-16
features:
- name: problem
dtype: string
- name: solution
dtype: string
- name: answer
dtype: string
- name: subject
dtype: string
- name: level
dtype: int64
- name: unique_id
dtype: string
- name: completions
sequence: string
- name: scores
sequence:
sequence: float64
- name: pred
dtype: string
- name: completion_tokens
dtype: int64
- name: agg_scores
sequence: float64
- name: pred_weighted@1
dtype: string
- name: pred_maj@1
dtype: string
- name: pred_naive@1
dtype: string
splits:
- name: train
num_bytes: 210484
num_examples: 48
download_size: 101732
dataset_size: 210484
- config_name: HuggingFaceH4_MATH-500--agg_strategy-last--T-0.0--top_p-1.0--n-1--max_tokens-2048--bsz-2
features:
- name: problem
dtype: string
- name: solution
dtype: string
- name: answer
dtype: string
- name: subject
dtype: string
- name: level
dtype: int64
- name: unique_id
dtype: string
- name: completions
sequence: string
- name: scores
sequence:
sequence: float64
- name: pred
dtype: string
- name: completion_tokens
dtype: int64
- name: agg_scores
sequence: float64
- name: pred_weighted@1
dtype: string
- name: pred_maj@1
dtype: string
- name: pred_naive@1
dtype: string
splits:
- name: train
num_bytes: 246355
num_examples: 48
download_size: 110612
dataset_size: 246355
- config_name: HuggingFaceH4_MATH-500--agg_strategy-last--T-0.0--top_p-1.0--n-1--max_tokens-2048--bsz-4
features:
- name: problem
dtype: string
- name: solution
dtype: string
- name: answer
dtype: string
- name: subject
dtype: string
- name: level
dtype: int64
- name: unique_id
dtype: string
- name: completions
sequence: string
- name: scores
sequence:
sequence: float64
- name: pred
dtype: string
- name: completion_tokens
dtype: int64
- name: agg_scores
sequence: float64
- name: pred_weighted@1
dtype: string
- name: pred_maj@1
dtype: string
- name: pred_naive@1
dtype: string
splits:
- name: train
num_bytes: 244251
num_examples: 48
download_size: 112373
dataset_size: 244251
- config_name: HuggingFaceH4_MATH-500--agg_strategy-last--T-0.0--top_p-1.0--n-1--max_tokens-2048--bsz-8
features:
- name: problem
dtype: string
- name: solution
dtype: string
- name: answer
dtype: string
- name: subject
dtype: string
- name: level
dtype: int64
- name: unique_id
dtype: string
- name: completions
sequence: string
- name: scores
sequence:
sequence: float64
- name: pred
dtype: string
- name: completion_tokens
dtype: int64
- name: agg_scores
sequence: float64
- name: pred_weighted@1
dtype: string
- name: pred_maj@1
dtype: string
- name: pred_naive@1
dtype: string
splits:
- name: train
num_bytes: 203042
num_examples: 48
download_size: 101483
dataset_size: 203042
configs:
- config_name: HuggingFaceH4_MATH-500--agg_strategy-last--T-0.0--top_p-1.0--n-1--max_tokens-2048
data_files:
- split: train
path: HuggingFaceH4_MATH-500--agg_strategy-last--T-0.0--top_p-1.0--n-1--max_tokens-2048/train-*
- config_name: HuggingFaceH4_MATH-500--agg_strategy-last--T-0.0--top_p-1.0--n-1--max_tokens-2048--bsz-16
data_files:
- split: train
path: HuggingFaceH4_MATH-500--agg_strategy-last--T-0.0--top_p-1.0--n-1--max_tokens-2048--bsz-16/train-*
- config_name: HuggingFaceH4_MATH-500--agg_strategy-last--T-0.0--top_p-1.0--n-1--max_tokens-2048--bsz-2
data_files:
- split: train
path: HuggingFaceH4_MATH-500--agg_strategy-last--T-0.0--top_p-1.0--n-1--max_tokens-2048--bsz-2/train-*
- config_name: HuggingFaceH4_MATH-500--agg_strategy-last--T-0.0--top_p-1.0--n-1--max_tokens-2048--bsz-4
data_files:
- split: train
path: HuggingFaceH4_MATH-500--agg_strategy-last--T-0.0--top_p-1.0--n-1--max_tokens-2048--bsz-4/train-*
- config_name: HuggingFaceH4_MATH-500--agg_strategy-last--T-0.0--top_p-1.0--n-1--max_tokens-2048--bsz-8
data_files:
- split: train
path: HuggingFaceH4_MATH-500--agg_strategy-last--T-0.0--top_p-1.0--n-1--max_tokens-2048--bsz-8/train-*
---
|
ADHIZ/benz | ADHIZ | "2024-11-20T11:28:23Z" | 9 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T11:28:17Z" | ---
dataset_info:
features:
- name: context
dtype: string
- name: question
dtype: string
- name: answers
dtype: string
splits:
- name: train
num_bytes: 6715163
num_examples: 7598
download_size: 1204118
dataset_size: 6715163
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
ybracke/dtaec-revised-2024-lexicon-test | ybracke | "2024-11-21T13:05:49Z" | 9 | 0 | [
"license:cc",
"size_categories:n<1K",
"format:json",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T11:33:45Z" | ---
license: cc
---
<iframe src="https://huggingface.co/datasets/ybracke/dtaec-revised-2024-lexicon-test/embed/sql-console/VReoqJw" frameborder="0" width="100%" height="560px"></iframe> |
oserikov/pmi-3 | oserikov | "2024-11-20T11:59:19Z" | 9 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T11:59:17Z" | ---
dataset_info:
features:
- name: all
struct:
- name: interlinear-text
list:
- name: item
struct:
- name: source
dtype: string
- name: paragraph
list:
- name: item
struct:
- name: speaker
dtype: string
- name: phrase
sequence: 'null'
- name: item
dtype: 'null'
splits:
- name: train
num_bytes: 1959
num_examples: 1
download_size: 3944
dataset_size: 1959
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
vantral/erzya_me | vantral | "2024-11-20T12:38:47Z" | 9 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T12:38:41Z" | ---
dataset_info:
features:
- name: all
struct:
- name: interlinear-text
list:
- name: item
struct:
- name: source
dtype: string
- name: paragraph
list:
- name: item
struct:
- name: speaker
dtype: string
- name: phrase
list:
- name: item
struct:
- name: ft
dtype: string
- name: id
dtype: string
- name: participant
dtype: string
- name: timestamp
sequence: string
- name: word
list:
list:
- name: item
struct:
- name: grammar_tags
sequence: string
- name: translation
sequence: string
- name: txt
dtype: string
- name: morph
list:
- name: item
struct:
- name: gls
dtype: string
- name: id
dtype: string
- name: txt
dtype: string
- name: item
dtype: 'null'
splits:
- name: train
num_bytes: 29152
num_examples: 1
download_size: 23153
dataset_size: 29152
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
PassbyGrocer/CAMO | PassbyGrocer | "2024-11-20T12:44:29Z" | 9 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-11-20T12:43:56Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: gt
dtype: image
splits:
- name: train
num_bytes: 178399642.0
num_examples: 1000
- name: validation
num_bytes: 53315901.0
num_examples: 250
- name: test
num_bytes: 53315901.0
num_examples: 250
download_size: 264586106
dataset_size: 285031444.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
- split: test
path: data/test-*
---
|