---
license: apache-2.0
tags:
  - merge
  - mergekit
  - lazymergekit
  - cognitivecomputations/dolphin-2.8-experiment26-7b
  - argilla/CapybaraHermes-2.5-Mistral-7B
base_model:
  - cognitivecomputations/dolphin-2.8-experiment26-7b
  - argilla/CapybaraHermes-2.5-Mistral-7B
---

# J.O.S.I.E.3-Beta11-7B-slerp

J.O.S.I.E.3-Beta11-7B-slerp is a merge of the following models using LazyMergekit:
* [cognitivecomputations/dolphin-2.8-experiment26-7b](https://huggingface.co/cognitivecomputations/dolphin-2.8-experiment26-7b)
* [argilla/CapybaraHermes-2.5-Mistral-7B](https://huggingface.co/argilla/CapybaraHermes-2.5-Mistral-7B)

## 🧩 Configuration

```yaml
slices:
  - sources:
      - model: cognitivecomputations/dolphin-2.8-experiment26-7b
        layer_range: [0, 32]
      - model: argilla/CapybaraHermes-2.5-Mistral-7B
        layer_range: [0, 32]
merge_method: slerp
base_model: argilla/CapybaraHermes-2.5-Mistral-7B
parameters:
  t:
    - filter: self_attn
      value: [0, 0.5, 0.3, 0.7, 1]
    - filter: mlp
      value: [1, 0.5, 0.7, 0.3, 0]
    - value: 0.5
dtype: bfloat16
```
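The `t` values set the interpolation factor between the two parents: mergekit spreads each five-value list across the layer range, so self-attention and MLP weights get different blends at different depths, with 0.5 used everywhere else. Below is a minimal, illustrative sketch of the underlying SLERP operation on a single weight tensor, written in plain PyTorch; it is not mergekit's actual implementation, and the `slerp` helper and tensor names are hypothetical.

```python
import torch

def slerp(t: float, a: torch.Tensor, b: torch.Tensor, eps: float = 1e-8) -> torch.Tensor:
    """Spherically interpolate between two tensors: a at t=0, b at t=1."""
    a_flat, b_flat = a.flatten().float(), b.flatten().float()
    a_unit = a_flat / (a_flat.norm() + eps)
    b_unit = b_flat / (b_flat.norm() + eps)
    # Angle between the two weight vectors on the unit hypersphere.
    omega = torch.arccos(torch.clamp(torch.dot(a_unit, b_unit), -1.0, 1.0))
    if omega.abs() < 1e-6:
        # Nearly parallel vectors: fall back to ordinary linear interpolation.
        return (1 - t) * a + t * b
    so = torch.sin(omega)
    out = (torch.sin((1 - t) * omega) / so) * a_flat + (torch.sin(t * omega) / so) * b_flat
    return out.reshape(a.shape).to(a.dtype)

# t = 0 keeps the first tensor, t = 1 the second; the config's five-value lists
# vary t with layer depth for self_attn and mlp weights (0.5 everywhere else).
w_a = torch.randn(4096, 4096)  # hypothetical attention weight from model A
w_b = torch.randn(4096, 4096)  # hypothetical attention weight from model B
w_merged = slerp(0.5, w_a, w_b)
print(w_merged.shape)
```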

## Evaluation

Raw results (lm-evaluation-harness / Open LLM Leaderboard format):

```json
{
    "all": {
        "acc": 0.6403971587643947,
        "acc_stderr": 0.03228725576276735,
        "acc_norm": 0.6413927640714372,
        "acc_norm_stderr": 0.03294011331780708,
        "mc1": 0.39167686658506734,
        "mc1_stderr": 0.017087795881769622,
        "mc2": 0.5576866593959974,
        "mc2_stderr": 0.01554622060467735
    },
    "harness|arc:challenge|25": {
        "acc": 0.6186006825938567,
        "acc_stderr": 0.014194389086685244,
        "acc_norm": 0.6450511945392492,
        "acc_norm_stderr": 0.013983036904094087
    },
    "harness|hellaswag|10": {
        "acc": 0.6738697470623382,
        "acc_stderr": 0.004678375103797962,
        "acc_norm": 0.8499302927703645,
        "acc_norm_stderr": 0.003564098420387764
    },
    "harness|hendrycksTest-abstract_algebra|5": {
        "acc": 0.28,
        "acc_stderr": 0.04512608598542128,
        "acc_norm": 0.28,
        "acc_norm_stderr": 0.04512608598542128
    },
    "harness|hendrycksTest-anatomy|5": {
        "acc": 0.5777777777777777,
        "acc_stderr": 0.04266763404099582,
        "acc_norm": 0.5777777777777777,
        "acc_norm_stderr": 0.04266763404099582
    },
    "harness|hendrycksTest-astronomy|5": {
        "acc": 0.6907894736842105,
        "acc_stderr": 0.037610708698674805,
        "acc_norm": 0.6907894736842105,
        "acc_norm_stderr": 0.037610708698674805
    },
    "harness|hendrycksTest-business_ethics|5": {
        "acc": 0.63,
        "acc_stderr": 0.04852365870939099,
        "acc_norm": 0.63,
        "acc_norm_stderr": 0.04852365870939099
    },
    "harness|hendrycksTest-clinical_knowledge|5": {
        "acc": 0.6830188679245283,
        "acc_stderr": 0.02863723563980089,
        "acc_norm": 0.6830188679245283,
        "acc_norm_stderr": 0.02863723563980089
    },
    "harness|hendrycksTest-college_biology|5": {
        "acc": 0.7708333333333334,
        "acc_stderr": 0.03514697467862388,
        "acc_norm": 0.7708333333333334,
        "acc_norm_stderr": 0.03514697467862388
    },
    "harness|hendrycksTest-college_chemistry|5": {
        "acc": 0.48,
        "acc_stderr": 0.050211673156867795,
        "acc_norm": 0.48,
        "acc_norm_stderr": 0.050211673156867795
    },
    "harness|hendrycksTest-college_computer_science|5": {
        "acc": 0.51,
        "acc_stderr": 0.05024183937956912,
        "acc_norm": 0.51,
        "acc_norm_stderr": 0.05024183937956912
    },
    "harness|hendrycksTest-college_mathematics|5": {
        "acc": 0.38,
        "acc_stderr": 0.04878317312145634,
        "acc_norm": 0.38,
        "acc_norm_stderr": 0.04878317312145634
    },
    "harness|hendrycksTest-college_medicine|5": {
        "acc": 0.6358381502890174,
        "acc_stderr": 0.03669072477416907,
        "acc_norm": 0.6358381502890174,
        "acc_norm_stderr": 0.03669072477416907
    },
    "harness|hendrycksTest-college_physics|5": {
        "acc": 0.4019607843137255,
        "acc_stderr": 0.048786087144669955,
        "acc_norm": 0.4019607843137255,
        "acc_norm_stderr": 0.048786087144669955
    },
    "harness|hendrycksTest-computer_security|5": {
        "acc": 0.74,
        "acc_stderr": 0.0440844002276808,
        "acc_norm": 0.74,
        "acc_norm_stderr": 0.0440844002276808
    },
    "harness|hendrycksTest-conceptual_physics|5": {
        "acc": 0.548936170212766,
        "acc_stderr": 0.032529096196131965,
        "acc_norm": 0.548936170212766,
        "acc_norm_stderr": 0.032529096196131965
    },
    "harness|hendrycksTest-econometrics|5": {
        "acc": 0.4649122807017544,
        "acc_stderr": 0.046920083813689104,
        "acc_norm": 0.4649122807017544,
        "acc_norm_stderr": 0.046920083813689104
    },
    "harness|hendrycksTest-electrical_engineering|5": {
        "acc": 0.5172413793103449,
        "acc_stderr": 0.04164188720169375,
        "acc_norm": 0.5172413793103449,
        "acc_norm_stderr": 0.04164188720169375
    },
    "harness|hendrycksTest-elementary_mathematics|5": {
        "acc": 0.41534391534391535,
        "acc_stderr": 0.025379524910778398,
        "acc_norm": 0.41534391534391535,
        "acc_norm_stderr": 0.025379524910778398
    },
    "harness|hendrycksTest-formal_logic|5": {
        "acc": 0.4444444444444444,
        "acc_stderr": 0.044444444444444495,
        "acc_norm": 0.4444444444444444,
        "acc_norm_stderr": 0.044444444444444495
    },
    "harness|hendrycksTest-global_facts|5": {
        "acc": 0.33,
        "acc_stderr": 0.047258156262526045,
        "acc_norm": 0.33,
        "acc_norm_stderr": 0.047258156262526045
    },
    "harness|hendrycksTest-high_school_biology|5": {
        "acc": 0.7741935483870968,
        "acc_stderr": 0.023785577884181012,
        "acc_norm": 0.7741935483870968,
        "acc_norm_stderr": 0.023785577884181012
    },
    "harness|hendrycksTest-high_school_chemistry|5": {
        "acc": 0.4729064039408867,
        "acc_stderr": 0.03512819077876106,
        "acc_norm": 0.4729064039408867,
        "acc_norm_stderr": 0.03512819077876106
    },
    "harness|hendrycksTest-high_school_computer_science|5": {
        "acc": 0.68,
        "acc_stderr": 0.04688261722621505,
        "acc_norm": 0.68,
        "acc_norm_stderr": 0.04688261722621505
    },
    "harness|hendrycksTest-high_school_european_history|5": {
        "acc": 0.793939393939394,
        "acc_stderr": 0.0315841532404771,
        "acc_norm": 0.793939393939394,
        "acc_norm_stderr": 0.0315841532404771
    },
    "harness|hendrycksTest-high_school_geography|5": {
        "acc": 0.8181818181818182,
        "acc_stderr": 0.027479603010538804,
        "acc_norm": 0.8181818181818182,
        "acc_norm_stderr": 0.027479603010538804
    },
    "harness|hendrycksTest-high_school_government_and_politics|5": {
        "acc": 0.9067357512953368,
        "acc_stderr": 0.02098685459328972,
        "acc_norm": 0.9067357512953368,
        "acc_norm_stderr": 0.02098685459328972
    },
    "harness|hendrycksTest-high_school_macroeconomics|5": {
        "acc": 0.658974358974359,
        "acc_stderr": 0.024035489676335082,
        "acc_norm": 0.658974358974359,
        "acc_norm_stderr": 0.024035489676335082
    },
    "harness|hendrycksTest-high_school_mathematics|5": {
        "acc": 0.32592592592592595,
        "acc_stderr": 0.028578348365473072,
        "acc_norm": 0.32592592592592595,
        "acc_norm_stderr": 0.028578348365473072
    },
    "harness|hendrycksTest-high_school_microeconomics|5": {
        "acc": 0.680672268907563,
        "acc_stderr": 0.030283995525884396,
        "acc_norm": 0.680672268907563,
        "acc_norm_stderr": 0.030283995525884396
    },
    "harness|hendrycksTest-high_school_physics|5": {
        "acc": 0.3576158940397351,
        "acc_stderr": 0.03913453431177258,
        "acc_norm": 0.3576158940397351,
        "acc_norm_stderr": 0.03913453431177258
    },
    "harness|hendrycksTest-high_school_psychology|5": {
        "acc": 0.8348623853211009,
        "acc_stderr": 0.015919557829976037,
        "acc_norm": 0.8348623853211009,
        "acc_norm_stderr": 0.015919557829976037
    },
    "harness|hendrycksTest-high_school_statistics|5": {
        "acc": 0.5138888888888888,
        "acc_stderr": 0.03408655867977749,
        "acc_norm": 0.5138888888888888,
        "acc_norm_stderr": 0.03408655867977749
    },
    "harness|hendrycksTest-high_school_us_history|5": {
        "acc": 0.7892156862745098,
        "acc_stderr": 0.028626547912437413,
        "acc_norm": 0.7892156862745098,
        "acc_norm_stderr": 0.028626547912437413
    },
    "harness|hendrycksTest-high_school_world_history|5": {
        "acc": 0.8143459915611815,
        "acc_stderr": 0.02531049537694486,
        "acc_norm": 0.8143459915611815,
        "acc_norm_stderr": 0.02531049537694486
    },
    "harness|hendrycksTest-human_aging|5": {
        "acc": 0.6995515695067265,
        "acc_stderr": 0.030769352008229146,
        "acc_norm": 0.6995515695067265,
        "acc_norm_stderr": 0.030769352008229146
    },
    "harness|hendrycksTest-human_sexuality|5": {
        "acc": 0.7938931297709924,
        "acc_stderr": 0.03547771004159465,
        "acc_norm": 0.7938931297709924,
        "acc_norm_stderr": 0.03547771004159465
    },
    "harness|hendrycksTest-international_law|5": {
        "acc": 0.768595041322314,
        "acc_stderr": 0.03849856098794088,
        "acc_norm": 0.768595041322314,
        "acc_norm_stderr": 0.03849856098794088
    },
    "harness|hendrycksTest-jurisprudence|5": {
        "acc": 0.7870370370370371,
        "acc_stderr": 0.0395783547198098,
        "acc_norm": 0.7870370370370371,
        "acc_norm_stderr": 0.0395783547198098
    },
    "harness|hendrycksTest-logical_fallacies|5": {
        "acc": 0.7668711656441718,
        "acc_stderr": 0.0332201579577674,
        "acc_norm": 0.7668711656441718,
        "acc_norm_stderr": 0.0332201579577674
    },
    "harness|hendrycksTest-machine_learning|5": {
        "acc": 0.5178571428571429,
        "acc_stderr": 0.047427623612430116,
        "acc_norm": 0.5178571428571429,
        "acc_norm_stderr": 0.047427623612430116
    },
    "harness|hendrycksTest-management|5": {
        "acc": 0.7864077669902912,
        "acc_stderr": 0.040580420156460344,
        "acc_norm": 0.7864077669902912,
        "acc_norm_stderr": 0.040580420156460344
    },
    "harness|hendrycksTest-marketing|5": {
        "acc": 0.8675213675213675,
        "acc_stderr": 0.022209309073165616,
        "acc_norm": 0.8675213675213675,
        "acc_norm_stderr": 0.022209309073165616
    },
    "harness|hendrycksTest-medical_genetics|5": {
        "acc": 0.69,
        "acc_stderr": 0.04648231987117316,
        "acc_norm": 0.69,
        "acc_norm_stderr": 0.04648231987117316
    },
    "harness|hendrycksTest-miscellaneous|5": {
        "acc": 0.8007662835249042,
        "acc_stderr": 0.014283378044296418,
        "acc_norm": 0.8007662835249042,
        "acc_norm_stderr": 0.014283378044296418
    },
    "harness|hendrycksTest-moral_disputes|5": {
        "acc": 0.7225433526011561,
        "acc_stderr": 0.024105712607754307,
        "acc_norm": 0.7225433526011561,
        "acc_norm_stderr": 0.024105712607754307
    },
    "harness|hendrycksTest-moral_scenarios|5": {
        "acc": 0.30726256983240224,
        "acc_stderr": 0.01543015884646961,
        "acc_norm": 0.30726256983240224,
        "acc_norm_stderr": 0.01543015884646961
    },
    "harness|hendrycksTest-nutrition|5": {
        "acc": 0.7320261437908496,
        "acc_stderr": 0.025360603796242557,
        "acc_norm": 0.7320261437908496,
        "acc_norm_stderr": 0.025360603796242557
    },
    "harness|hendrycksTest-philosophy|5": {
        "acc": 0.7138263665594855,
        "acc_stderr": 0.025670259242188933,
        "acc_norm": 0.7138263665594855,
        "acc_norm_stderr": 0.025670259242188933
    },
    "harness|hendrycksTest-prehistory|5": {
        "acc": 0.7283950617283951,
        "acc_stderr": 0.024748624490537368,
        "acc_norm": 0.7283950617283951,
        "acc_norm_stderr": 0.024748624490537368
    },
    "harness|hendrycksTest-professional_accounting|5": {
        "acc": 0.48936170212765956,
        "acc_stderr": 0.029820747191422473,
        "acc_norm": 0.48936170212765956,
        "acc_norm_stderr": 0.029820747191422473
    },
    "harness|hendrycksTest-professional_law|5": {
        "acc": 0.4706649282920469,
        "acc_stderr": 0.01274823839736555,
        "acc_norm": 0.4706649282920469,
        "acc_norm_stderr": 0.01274823839736555
    },
    "harness|hendrycksTest-professional_medicine|5": {
        "acc": 0.6764705882352942,
        "acc_stderr": 0.028418208619406762,
        "acc_norm": 0.6764705882352942,
        "acc_norm_stderr": 0.028418208619406762
    },
    "harness|hendrycksTest-professional_psychology|5": {
        "acc": 0.6764705882352942,
        "acc_stderr": 0.018926082916083376,
        "acc_norm": 0.6764705882352942,
        "acc_norm_stderr": 0.018926082916083376
    },
    "harness|hendrycksTest-public_relations|5": {
        "acc": 0.6636363636363637,
        "acc_stderr": 0.04525393596302506,
        "acc_norm": 0.6636363636363637,
        "acc_norm_stderr": 0.04525393596302506
    },
    "harness|hendrycksTest-security_studies|5": {
        "acc": 0.7387755102040816,
        "acc_stderr": 0.02812342933514278,
        "acc_norm": 0.7387755102040816,
        "acc_norm_stderr": 0.02812342933514278
    },
    "harness|hendrycksTest-sociology|5": {
        "acc": 0.835820895522388,
        "acc_stderr": 0.02619392354445412,
        "acc_norm": 0.835820895522388,
        "acc_norm_stderr": 0.02619392354445412
    },
    "harness|hendrycksTest-us_foreign_policy|5": {
        "acc": 0.83,
        "acc_stderr": 0.0377525168068637,
        "acc_norm": 0.83,
        "acc_norm_stderr": 0.0377525168068637
    },
    "harness|hendrycksTest-virology|5": {
        "acc": 0.5421686746987951,
        "acc_stderr": 0.0387862677100236,
        "acc_norm": 0.5421686746987951,
        "acc_norm_stderr": 0.0387862677100236
    },
    "harness|hendrycksTest-world_religions|5": {
        "acc": 0.8187134502923976,
        "acc_stderr": 0.029547741687640038,
        "acc_norm": 0.8187134502923976,
        "acc_norm_stderr": 0.029547741687640038
    },
    "harness|truthfulqa:mc|0": {
        "mc1": 0.39167686658506734,
        "mc1_stderr": 0.017087795881769622,
        "mc2": 0.5576866593959974,
        "mc2_stderr": 0.01554622060467735
    },
    "harness|winogrande|5": {
        "acc": 0.7884767166535123,
        "acc_stderr": 0.011477747684223188
    },
    "harness|gsm8k|5": {
        "acc": 0.6360879454131918,
        "acc_stderr": 0.013252539227966195
    }
}
```
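If you want to aggregate these raw numbers yourself, a small helper like the sketch below works; it assumes the JSON block above has been saved locally as `results.json` (a hypothetical filename).

```python
import json

# Load the raw harness results saved from the block above.
with open("results.json") as f:
    results = json.load(f)

# Mean normalized accuracy over the MMLU (hendrycksTest) subtasks.
mmlu = [v["acc_norm"] for k, v in results.items() if k.startswith("harness|hendrycksTest")]
print(f"MMLU subtasks: {len(mmlu)}, mean acc_norm: {sum(mmlu) / len(mmlu):.4f}")

# Headline averages reported under the "all" key.
print(f"Overall acc: {results['all']['acc']:.4f}  acc_norm: {results['all']['acc_norm']:.4f}")
```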

## 💻 Usage

```python
# Install dependencies (notebook syntax; drop the "!" in a shell).
!pip install -qU transformers accelerate

from transformers import AutoTokenizer
import transformers
import torch

model = "Isaak-Carter/J.O.S.I.E.3-Beta11-7B-slerp"
messages = [{"role": "user", "content": "What is a large language model?"}]

# Build the prompt with the model's chat template.
tokenizer = AutoTokenizer.from_pretrained(model)
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)

# Text-generation pipeline in half precision, sharded across available devices.
pipeline = transformers.pipeline(
    "text-generation",
    model=model,
    torch_dtype=torch.float16,
    device_map="auto",
)

outputs = pipeline(prompt, max_new_tokens=256, do_sample=True, temperature=0.7, top_k=50, top_p=0.95)
print(outputs[0]["generated_text"])
```
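
As an alternative to the pipeline helper, the model can also be loaded directly with `AutoModelForCausalLM`. The sketch below is illustrative only (bfloat16 to match the merge dtype; adjust to your hardware):

```python
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

model_id = "Isaak-Carter/J.O.S.I.E.3-Beta11-7B-slerp"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,  # matches the merge dtype; use float16 if preferred
    device_map="auto",
)

messages = [{"role": "user", "content": "What is a large language model?"}]
input_ids = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

output_ids = model.generate(input_ids, max_new_tokens=256, do_sample=True, temperature=0.7, top_p=0.95)
# Decode only the newly generated tokens, skipping the prompt.
print(tokenizer.decode(output_ids[0][input_ids.shape[-1]:], skip_special_tokens=True))
```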