Edit model card

JOSIE_Beta-8-7B-slerp

JOSIE_Beta-8-7B-slerp is a merge of the following models using LazyMergekit:

Important!!!

Upon seeing the eval benchmarks on the LLM Leaderboard, this model performs the worst. The best-performing one (on the leaderboard) is the Beta 3 version.

{
    "all": {
        "acc": 0.6212846416057433,
        "acc_stderr": 0.03289607423593368,
        "acc_norm": 0.6268274539918854,
        "acc_norm_stderr": 0.03356884635772938,
        "mc1": 0.3157894736842105,
        "mc1_stderr": 0.016272287957916923,
        "mc2": 0.4868797251828956,
        "mc2_stderr": 0.01529943410920313
    },
    "harness|arc:challenge|25": {
        "acc": 0.5776450511945392,
        "acc_stderr": 0.014434138713379981,
        "acc_norm": 0.6040955631399317,
        "acc_norm_stderr": 0.014291228393536592
    },
    "harness|hellaswag|10": {
        "acc": 0.6363274248157738,
        "acc_stderr": 0.004800728138792394,
        "acc_norm": 0.8365863373829915,
        "acc_norm_stderr": 0.0036898701424130753
    },
    "harness|hendrycksTest-abstract_algebra|5": {
        "acc": 0.31,
        "acc_stderr": 0.046482319871173156,
        "acc_norm": 0.31,
        "acc_norm_stderr": 0.046482319871173156
    },
    "harness|hendrycksTest-anatomy|5": {
        "acc": 0.6,
        "acc_stderr": 0.04232073695151589,
        "acc_norm": 0.6,
        "acc_norm_stderr": 0.04232073695151589
    },
    "harness|hendrycksTest-astronomy|5": {
        "acc": 0.5986842105263158,
        "acc_stderr": 0.039889037033362836,
        "acc_norm": 0.5986842105263158,
        "acc_norm_stderr": 0.039889037033362836
    },
    "harness|hendrycksTest-business_ethics|5": {
        "acc": 0.58,
        "acc_stderr": 0.049604496374885836,
        "acc_norm": 0.58,
        "acc_norm_stderr": 0.049604496374885836
    },
    "harness|hendrycksTest-clinical_knowledge|5": {
        "acc": 0.6867924528301886,
        "acc_stderr": 0.028544793319055326,
        "acc_norm": 0.6867924528301886,
        "acc_norm_stderr": 0.028544793319055326
    },
    "harness|hendrycksTest-college_biology|5": {
        "acc": 0.7083333333333334,
        "acc_stderr": 0.038009680605548594,
        "acc_norm": 0.7083333333333334,
        "acc_norm_stderr": 0.038009680605548594
    },
    "harness|hendrycksTest-college_chemistry|5": {
        "acc": 0.53,
        "acc_stderr": 0.05016135580465919,
        "acc_norm": 0.53,
        "acc_norm_stderr": 0.05016135580465919
    },
    "harness|hendrycksTest-college_computer_science|5": {
        "acc": 0.51,
        "acc_stderr": 0.05024183937956912,
        "acc_norm": 0.51,
        "acc_norm_stderr": 0.05024183937956912
    },
    "harness|hendrycksTest-college_mathematics|5": {
        "acc": 0.39,
        "acc_stderr": 0.04902071300001975,
        "acc_norm": 0.39,
        "acc_norm_stderr": 0.04902071300001975
    },
    "harness|hendrycksTest-college_medicine|5": {
        "acc": 0.6184971098265896,
        "acc_stderr": 0.03703851193099521,
        "acc_norm": 0.6184971098265896,
        "acc_norm_stderr": 0.03703851193099521
    },
    "harness|hendrycksTest-college_physics|5": {
        "acc": 0.39215686274509803,
        "acc_stderr": 0.048580835742663454,
        "acc_norm": 0.39215686274509803,
        "acc_norm_stderr": 0.048580835742663454
    },
    "harness|hendrycksTest-computer_security|5": {
        "acc": 0.75,
        "acc_stderr": 0.04351941398892446,
        "acc_norm": 0.75,
        "acc_norm_stderr": 0.04351941398892446
    },
    "harness|hendrycksTest-conceptual_physics|5": {
        "acc": 0.5659574468085107,
        "acc_stderr": 0.03240038086792747,
        "acc_norm": 0.5659574468085107,
        "acc_norm_stderr": 0.03240038086792747
    },
    "harness|hendrycksTest-econometrics|5": {
        "acc": 0.47368421052631576,
        "acc_stderr": 0.04697085136647863,
        "acc_norm": 0.47368421052631576,
        "acc_norm_stderr": 0.04697085136647863
    },
    "harness|hendrycksTest-electrical_engineering|5": {
        "acc": 0.5586206896551724,
        "acc_stderr": 0.04137931034482757,
        "acc_norm": 0.5586206896551724,
        "acc_norm_stderr": 0.04137931034482757
    },
    "harness|hendrycksTest-elementary_mathematics|5": {
        "acc": 0.41005291005291006,
        "acc_stderr": 0.025331202438944437,
        "acc_norm": 0.41005291005291006,
        "acc_norm_stderr": 0.025331202438944437
    },
    "harness|hendrycksTest-formal_logic|5": {
        "acc": 0.3888888888888889,
        "acc_stderr": 0.04360314860077459,
        "acc_norm": 0.3888888888888889,
        "acc_norm_stderr": 0.04360314860077459
    },
    "harness|hendrycksTest-global_facts|5": {
        "acc": 0.4,
        "acc_stderr": 0.049236596391733084,
        "acc_norm": 0.4,
        "acc_norm_stderr": 0.049236596391733084
    },
    "harness|hendrycksTest-high_school_biology|5": {
        "acc": 0.7580645161290323,
        "acc_stderr": 0.024362599693031083,
        "acc_norm": 0.7580645161290323,
        "acc_norm_stderr": 0.024362599693031083
    },
    "harness|hendrycksTest-high_school_chemistry|5": {
        "acc": 0.5221674876847291,
        "acc_stderr": 0.03514528562175008,
        "acc_norm": 0.5221674876847291,
        "acc_norm_stderr": 0.03514528562175008
    },
    "harness|hendrycksTest-high_school_computer_science|5": {
        "acc": 0.63,
        "acc_stderr": 0.04852365870939099,
        "acc_norm": 0.63,
        "acc_norm_stderr": 0.04852365870939099
    },
    "harness|hendrycksTest-high_school_european_history|5": {
        "acc": 0.7515151515151515,
        "acc_stderr": 0.033744026441394036,
        "acc_norm": 0.7515151515151515,
        "acc_norm_stderr": 0.033744026441394036
    },
    "harness|hendrycksTest-high_school_geography|5": {
        "acc": 0.7727272727272727,
        "acc_stderr": 0.029857515673386417,
        "acc_norm": 0.7727272727272727,
        "acc_norm_stderr": 0.029857515673386417
    },
    "harness|hendrycksTest-high_school_government_and_politics|5": {
        "acc": 0.8497409326424871,
        "acc_stderr": 0.025787723180723875,
        "acc_norm": 0.8497409326424871,
        "acc_norm_stderr": 0.025787723180723875
    },
    "harness|hendrycksTest-high_school_macroeconomics|5": {
        "acc": 0.6384615384615384,
        "acc_stderr": 0.024359581465396997,
        "acc_norm": 0.6384615384615384,
        "acc_norm_stderr": 0.024359581465396997
    },
    "harness|hendrycksTest-high_school_mathematics|5": {
        "acc": 0.337037037037037,
        "acc_stderr": 0.028820884666253255,
        "acc_norm": 0.337037037037037,
        "acc_norm_stderr": 0.028820884666253255
    },
    "harness|hendrycksTest-high_school_microeconomics|5": {
        "acc": 0.6764705882352942,
        "acc_stderr": 0.03038835355188679,
        "acc_norm": 0.6764705882352942,
        "acc_norm_stderr": 0.03038835355188679
    },
    "harness|hendrycksTest-high_school_physics|5": {
        "acc": 0.3443708609271523,
        "acc_stderr": 0.038796870240733264,
        "acc_norm": 0.3443708609271523,
        "acc_norm_stderr": 0.038796870240733264
    },
    "harness|hendrycksTest-high_school_psychology|5": {
        "acc": 0.8055045871559633,
        "acc_stderr": 0.01697028909045804,
        "acc_norm": 0.8055045871559633,
        "acc_norm_stderr": 0.01697028909045804
    },
    "harness|hendrycksTest-high_school_statistics|5": {
        "acc": 0.5370370370370371,
        "acc_stderr": 0.03400603625538272,
        "acc_norm": 0.5370370370370371,
        "acc_norm_stderr": 0.03400603625538272
    },
    "harness|hendrycksTest-high_school_us_history|5": {
        "acc": 0.7794117647058824,
        "acc_stderr": 0.02910225438967407,
        "acc_norm": 0.7794117647058824,
        "acc_norm_stderr": 0.02910225438967407
    },
    "harness|hendrycksTest-high_school_world_history|5": {
        "acc": 0.759493670886076,
        "acc_stderr": 0.027820781981149685,
        "acc_norm": 0.759493670886076,
        "acc_norm_stderr": 0.027820781981149685
    },
    "harness|hendrycksTest-human_aging|5": {
        "acc": 0.6636771300448431,
        "acc_stderr": 0.031708824268455,
        "acc_norm": 0.6636771300448431,
        "acc_norm_stderr": 0.031708824268455
    },
    "harness|hendrycksTest-human_sexuality|5": {
        "acc": 0.7251908396946565,
        "acc_stderr": 0.03915345408847836,
        "acc_norm": 0.7251908396946565,
        "acc_norm_stderr": 0.03915345408847836
    },
    "harness|hendrycksTest-international_law|5": {
        "acc": 0.71900826446281,
        "acc_stderr": 0.04103203830514512,
        "acc_norm": 0.71900826446281,
        "acc_norm_stderr": 0.04103203830514512
    },
    "harness|hendrycksTest-jurisprudence|5": {
        "acc": 0.7685185185185185,
        "acc_stderr": 0.04077494709252626,
        "acc_norm": 0.7685185185185185,
        "acc_norm_stderr": 0.04077494709252626
    },
    "harness|hendrycksTest-logical_fallacies|5": {
        "acc": 0.7177914110429447,
        "acc_stderr": 0.03536117886664743,
        "acc_norm": 0.7177914110429447,
        "acc_norm_stderr": 0.03536117886664743
    },
    "harness|hendrycksTest-machine_learning|5": {
        "acc": 0.41964285714285715,
        "acc_stderr": 0.04684099321077106,
        "acc_norm": 0.41964285714285715,
        "acc_norm_stderr": 0.04684099321077106
    },
    "harness|hendrycksTest-management|5": {
        "acc": 0.7766990291262136,
        "acc_stderr": 0.04123553189891431,
        "acc_norm": 0.7766990291262136,
        "acc_norm_stderr": 0.04123553189891431
    },
    "harness|hendrycksTest-marketing|5": {
        "acc": 0.8974358974358975,
        "acc_stderr": 0.019875655027867447,
        "acc_norm": 0.8974358974358975,
        "acc_norm_stderr": 0.019875655027867447
    },
    "harness|hendrycksTest-medical_genetics|5": {
        "acc": 0.71,
        "acc_stderr": 0.045604802157206845,
        "acc_norm": 0.71,
        "acc_norm_stderr": 0.045604802157206845
    },
    "harness|hendrycksTest-miscellaneous|5": {
        "acc": 0.7918263090676884,
        "acc_stderr": 0.014518592248904033,
        "acc_norm": 0.7918263090676884,
        "acc_norm_stderr": 0.014518592248904033
    },
    "harness|hendrycksTest-moral_disputes|5": {
        "acc": 0.7052023121387283,
        "acc_stderr": 0.024547617794803828,
        "acc_norm": 0.7052023121387283,
        "acc_norm_stderr": 0.024547617794803828
    },
    "harness|hendrycksTest-moral_scenarios|5": {
        "acc": 0.4044692737430168,
        "acc_stderr": 0.016414440917293147,
        "acc_norm": 0.4044692737430168,
        "acc_norm_stderr": 0.016414440917293147
    },
    "harness|hendrycksTest-nutrition|5": {
        "acc": 0.7091503267973857,
        "acc_stderr": 0.02600480036395213,
        "acc_norm": 0.7091503267973857,
        "acc_norm_stderr": 0.02600480036395213
    },
    "harness|hendrycksTest-philosophy|5": {
        "acc": 0.707395498392283,
        "acc_stderr": 0.02583989833487798,
        "acc_norm": 0.707395498392283,
        "acc_norm_stderr": 0.02583989833487798
    },
    "harness|hendrycksTest-prehistory|5": {
        "acc": 0.6944444444444444,
        "acc_stderr": 0.025630824975621355,
        "acc_norm": 0.6944444444444444,
        "acc_norm_stderr": 0.025630824975621355
    },
    "harness|hendrycksTest-professional_accounting|5": {
        "acc": 0.4716312056737589,
        "acc_stderr": 0.029779450957303055,
        "acc_norm": 0.4716312056737589,
        "acc_norm_stderr": 0.029779450957303055
    },
    "harness|hendrycksTest-professional_law|5": {
        "acc": 0.4302477183833116,
        "acc_stderr": 0.012645361435115233,
        "acc_norm": 0.4302477183833116,
        "acc_norm_stderr": 0.012645361435115233
    },
    "harness|hendrycksTest-professional_medicine|5": {
        "acc": 0.6397058823529411,
        "acc_stderr": 0.02916312857067073,
        "acc_norm": 0.6397058823529411,
        "acc_norm_stderr": 0.02916312857067073
    },
    "harness|hendrycksTest-professional_psychology|5": {
        "acc": 0.6470588235294118,
        "acc_stderr": 0.01933314202079716,
        "acc_norm": 0.6470588235294118,
        "acc_norm_stderr": 0.01933314202079716
    },
    "harness|hendrycksTest-public_relations|5": {
        "acc": 0.6363636363636364,
        "acc_stderr": 0.04607582090719976,
        "acc_norm": 0.6363636363636364,
        "acc_norm_stderr": 0.04607582090719976
    },
    "harness|hendrycksTest-security_studies|5": {
        "acc": 0.6775510204081633,
        "acc_stderr": 0.029923100563683906,
        "acc_norm": 0.6775510204081633,
        "acc_norm_stderr": 0.029923100563683906
    },
    "harness|hendrycksTest-sociology|5": {
        "acc": 0.8208955223880597,
        "acc_stderr": 0.027113286753111844,
        "acc_norm": 0.8208955223880597,
        "acc_norm_stderr": 0.027113286753111844
    },
    "harness|hendrycksTest-us_foreign_policy|5": {
        "acc": 0.85,
        "acc_stderr": 0.03588702812826371,
        "acc_norm": 0.85,
        "acc_norm_stderr": 0.03588702812826371
    },
    "harness|hendrycksTest-virology|5": {
        "acc": 0.5,
        "acc_stderr": 0.03892494720807614,
        "acc_norm": 0.5,
        "acc_norm_stderr": 0.03892494720807614
    },
    "harness|hendrycksTest-world_religions|5": {
        "acc": 0.8245614035087719,
        "acc_stderr": 0.029170885500727665,
        "acc_norm": 0.8245614035087719,
        "acc_norm_stderr": 0.029170885500727665
    },
    "harness|truthfulqa:mc|0": {
        "mc1": 0.3157894736842105,
        "mc1_stderr": 0.016272287957916923,
        "mc2": 0.4868797251828956,
        "mc2_stderr": 0.01529943410920313
    },
    "harness|winogrande|5": {
        "acc": 0.7813733228097869,
        "acc_stderr": 0.011616198215773239
    },
    "harness|gsm8k|5": {
        "acc": 0.36087945413191813,
        "acc_stderr": 0.013228626753925143
    }
}

🧩 Configuration

# mergekit configuration: SLERP merge of two 7B models, all 32 layers from each.
slices:
  - sources:
      - model: HuggingFaceH4/mistral-7b-anthropic
        layer_range: [0, 32]
      - model: HuggingFaceH4/mistral-7b-grok
        layer_range: [0, 32]
merge_method: slerp
# Tokenizer/config metadata are taken from the base model.
base_model: HuggingFaceH4/mistral-7b-anthropic
parameters:
  t:
    # Interpolation factor t per tensor type (0 = base model, 1 = other model).
    # NOTE(review): the 5-element lists are presumably anchor points interpolated
    # across the layer stack — confirm against the mergekit documentation.
    - filter: self_attn
      value: [0, 0.5, 0.3, 0.7, 1]
    - filter: mlp
      value: [1, 0.5, 0.7, 0.3, 0]
    # Fallback t for all tensors not matched by a filter above.
    - value: 0.5
dtype: bfloat16

💻 Usage

!pip install -qU transformers accelerate

from transformers import AutoTokenizer
import transformers
import torch

# Hub repository ID of the merged model; reused as both tokenizer and model source.
model = "Isaak-Carter/JOSIE_Beta-8-7B-slerp"
messages = [{"role": "user", "content": "What is a large language model?"}]

# Render the chat messages into a single prompt string using the model's own
# chat template; tokenization itself is left to the pipeline below.
tokenizer = AutoTokenizer.from_pretrained(model)
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)

# Renamed from `pipeline` to avoid shadowing `transformers.pipeline`.
generator = transformers.pipeline(
    "text-generation",
    model=model,
    torch_dtype=torch.float16,
    device_map="auto",  # spread weights across available devices automatically
)

# Sampled generation; returns a list of dicts with the full generated text.
outputs = generator(prompt, max_new_tokens=256, do_sample=True, temperature=0.7, top_k=50, top_p=0.95)
print(outputs[0]["generated_text"])
Downloads last month
4
Safetensors
Model size
7.24B params
Tensor type
BF16
·
Inference Examples
This model does not have enough activity to be deployed to Inference API (serverless) yet. Increase its social visibility and check back later, or deploy to Inference Endpoints (dedicated) instead.

Model tree for Goekdeniz-Guelmez/J.O.S.I.E.3-Beta8-slerp