| type (string, 1 class) | id (string, length 5–122) | num_branches (int64, 1–1.76k) | branches (sequence, length 1–1.76k) | main_branch_size (int64, 0–32,943B) |
|---|---|---|---|---|
| model | kotokounity/JSaebi | 1 | ["main"] | 127,473,978 |
| model | gaianet/Yi-1.5-9B-Chat-GGUF | 1 | ["main"] | 84,471,140,745 |
| model | blockblockblock/Dark-Miqu-70B-bpw5.5-exl2 | 1 | ["main"] | 47,775,073,633 |
| model | abc88767/22c23 | 1 | ["main"] | 3,295,853,063 |
| model | fieldtm/0512-longformer_korquad | 1 | ["main"] | 1,519 |
| model | giannisan/timesformer-base-finetuned-k400-finetuned-ucf101-subset | 1 | ["main"] | 6,306,938,506 |
| model | RichardErkhov/lemon-mint_-_gemma-2b-translation-v0.103-4bits | 1 | ["main"] | 2,185,300,748 |
| model | jachs182/distilbert-base-uncased-finetuned-emotion | 1 | ["main"] | 268,804,837 |
| model | NBA55/Experiment_with_trained_model_Final_CPO_for_all_3_issues-epoch-2 | 1 | ["main"] | 402,687,162 |
| model | jujusosmart/bert-base-chinese-Penalty | 1 | ["main"] | 409,107,865 |
| model | OwenArli/ArliAI-Llama-3-8B-Dolfin-v0.5-GGUF | 1 | ["main"] | 67,341,501,641 |
| model | NatalieCheong/detr-resnet-50-hardhat-finetuned | 1 | ["main"] | 1,519 |
| model | Harsh994/PlantDiseaseRecoginition | 1 | ["main"] | 343,229,634 |
| model | kenken999/model999 | 1 | ["main"] | 1,562 |
| model | PLS442/Entrevistadora_Oroqui | 1 | ["main"] | 56,190,781 |
| model | dbaek111/Llama-2-7b-chat-hf-Elon_407_HPC_Q | 1 | ["main"] | 4,829,132,333 |
| model | liveforloss/Logan2 | 1 | ["main"] | 1,519 |
| model | nelsonauner/results | 1 | ["main"] | 268,068,492 |
| model | jiangcongtao/llama3-8b-oig-unsloth-merged | 1 | ["main"] | 16,069,789,678 |
| model | Coolwowsocoolwow/Blaze_Sonic_06 | 1 | ["main"] | 97,483,478 |
| model | SunShineFlower/HeizePretrained_200epoch | 1 | ["main"] | 87,576,903 |
| model | RichardErkhov/lightblue_-_suzume-llama-3-8B-multilingual-8bits | 1 | ["main"] | 9,095,998,121 |
| model | MinhViet/Viet1 | 1 | ["main"] | 2,485,511,066 |
| model | second-state/Yi-1.5-34B-Chat-GGUF | 1 | ["main"] | 327,578,314,277 |
| model | gaianet/Yi-1.5-34B-Chat-GGUF | 1 | ["main"] | 327,578,310,967 |
| model | jiangcongtao/llama3-8b-oig-unsloth | 1 | ["main"] | 167,835,070 |
| model | RichardErkhov/lemon-mint_-_gemma-2b-translation-v0.103-8bits | 1 | ["main"] | 3,055,146,642 |
| model | Kvn317/Me | 1 | ["main"] | 1,519 |
| model | stafdif/Kate | 1 | ["main"] | 57,420,851 |
| model | MarceloLZR/titanic | 1 | ["main"] | 38,341 |
| model | abc88767/3sc23 | 1 | ["main"] | 3,295,853,063 |
| model | euiyulsong/mistral-7b-qlora-arc-sft2 | 1 | ["main"] | 4,977,957,950 |
| model | AdityaXPV/Llama-3-8b-Instruct-law-sage-v0.1 | 1 | ["main"] | 16,069,719,494 |
| model | ychuan/llama2-qlora-finetunined-french | 1 | ["main"] | 8,183,901,142 |
| model | RichardErkhov/senseable_-_WestLake-7B-v2-gguf | 1 | ["main"] | 87,511,276,125 |
| model | haochicheers/finetune_llama3_forcast_has_output_issue | 1 | ["main"] | 176,971,819 |
| model | netcat420/MFANN3bV0.8.10 | 1 | ["main"] | 5,562,833,934 |
| model | sue123456/KoBERT-finetuned-squad_korv1-accelerate | 1 | ["main"] | 366,679,283 |
| model | seawolf2357/mergetest2 | 1 | ["main"] | 14,485,816,047 |
| model | GeorgiaTech/0.0_llama_nodpo_3iters_bs128_531lr_iter_2 | 1 | ["main"] | 16,069,738,803 |
| model | Recaru/nox_DPOv3-Q4_K_M-GGUF | 1 | ["main"] | 6,461,670,919 |
| model | Holarissun/RM-TLDR_contrast_loraR32_-1_gemma2b_lr5e-05_bs2_g4 | 1 | ["main"] | 32,323,096 |
| model | Recaru/nox_DPOv3-Q4_K_S-GGUF | 1 | ["main"] | 6,118,524,423 |
| model | TinyPixel/dnb | 1 | ["main"] | 71,547,771 |
| model | yiyu-earth/sherpa-onnx-paraformer-zh-2024-04-25 | 1 | ["main"] | 243,463,598 |
| model | ajlao/dqn-SpaceInvadersNoFrameskip-v4 | 1 | ["main"] | 30,868,438 |
| model | RichardErkhov/lemon-mint_-_gemma-2b-translation-v0.103-gguf | 1 | ["main"] | 32,891,497,636 |
| model | Coolwowsocoolwow/Pinstripe_potoroo | 1 | ["main"] | 221,605,586 |
| model | Brei004/titanich5 | 1 | ["main"] | 1,519 |
| model | Yuki20/Alpaca_8b_unsloth_json | 1 | ["main"] | 16,237,553,074 |
| model | Abdulhanan2006/WaifuAI-L3-8B-8k-gguf | 1 | ["main"] | 24,609,663,550 |
| model | salapierrot16/bobbyminus | 1 | ["main"] | 104,917,168 |
| model | Thouph/tagger-siglip-so400m-384-9940 | 1 | ["main"] | 1,759,133,472 |
| model | uncol/ppo-LunarLander-v2 | 1 | ["main"] | 490,883 |
| model | GENIAC-Team-Ozaki/lora-sft-excluded-chatbot-arena-finetuned-stage2-iter40000 | 1 | ["main"] | 23,062,848,213 |
| model | JihoonLee98/Korean_grammer_correction | 1 | ["main"] | 15,286,800,458 |
| model | ajlao/dqn-BreakoutNoFrameskip-v4_2 | 1 | ["main"] | 30,631,249 |
| model | sue123456/BERT-multilingual-finetuned-squad_korv1-accelerate | 1 | ["main"] | 712,999,550 |
| model | Recaru/nox-solar-10.7b-v4-Q4_K_S-GGUF | 1 | ["main"] | 6,118,524,405 |
| model | fine-tuned/jina-embeddings-v2-base-en-2024512-wvj9-webapp | 1 | ["main"] | 550,554,548 |
| model | Stefan171/TinyLlama-QuantumQuill-chat-12-05-24 | 1 | ["main"] | 2,202,512,407 |
| model | Minbyul/selfbiorag-7b-wo-kqa_golden-iter-dpo-step2 | 1 | ["main"] | 13,479,524,679 |
| model | Coolwowsocoolwow/Google_Translate | 1 | ["main"] | 145,144,307 |
| model | ArtChicken/fohwx-woman-ponyxl-27DProny | 1 | ["main"] | 22,851,137,527 |
| model | Vedx04/OpenMath-Mistral-7B-v0.1-hf-hendrycks | 1 | ["main"] | 170,129,831 |
| model | amphora/d-math | 1 | ["main"] | 28,969,284,204 |
| model | mdhameem/image_to_food | 1 | ["main"] | 135,391,813 |
| model | RichardErkhov/Technoculture_-_Medorca-4x7b-gguf | 1 | ["main"] | 237,007,443,426 |
| model | kishorea/Llama3_p8_v2 | 1 | ["main"] | 9,171,517,405 |
| model | AlignmentResearch/robust_llm_pythia-31m_niki-047_wl_random-token-1280_seed-2 | 31 | ["adv-training-round-29", "adv-training-round-28", …, "adv-training-round-0", "main"] | 1,519 |
| model | AlignmentResearch/robust_llm_pythia-31m_niki-047_wl_random-token-1280_seed-1 | 31 | ["adv-training-round-29", "adv-training-round-28", …, "adv-training-round-0", "main"] | 1,519 |
| model | AlignmentResearch/robust_llm_pythia-14m_niki-047_wl_random-token-1280_seed-0 | 31 | ["adv-training-round-29", "adv-training-round-28", …, "adv-training-round-0", "main"] | 1,519 |
| model | AlignmentResearch/robust_llm_pythia-14m_niki-047_wl_random-token-1280_seed-1 | 31 | ["adv-training-round-29", "adv-training-round-28", …, "adv-training-round-0", "main"] | 1,519 |
| model | AlignmentResearch/robust_llm_pythia-31m_niki-047_wl_random-token-1280_seed-0 | 31 | ["adv-training-round-29", "adv-training-round-28", …, "adv-training-round-0", "main"] | 1,519 |
| model | AlignmentResearch/robust_llm_pythia-14m_niki-047_wl_random-token-1280_seed-2 | 31 | ["adv-training-round-29", "adv-training-round-28", …, "adv-training-round-0", "main"] | 1,519 |
| model | seawolf2357/kollm3 | 1 | ["main"] | 14,485,321,520 |
| model | ajlao/my_awesome_qa_model | 1 | ["main"] | 266,536,580 |
| model | RichardErkhov/GritLM_-_GritLM-8x7B-gguf | 1 | ["main"] | 359,731,798,407 |
| model | AlignmentResearch/robust_llm_pythia-70m_niki-046_enronspam_random-token-1280_seed-2 | 31 | ["adv-training-round-29", "adv-training-round-28", …, "adv-training-round-0", "main"] | 1,519 |
| model | rhye/cillian_lora | 1 | ["main"] | 54,302,248 |
| model | netcat420/MFANN3bv0.8.10-GGUF | 1 | ["main"] | 3,356,490,431 |
| model | jOS63/my_awesome_qa_model | 1 | ["main"] | 266,531,914 |
| model | saeidebbei/Azposht | 1 | ["main"] | 1,519 |
| model | gaizerick/anika | 1 | ["main"] | 51,104,810 |
| model | AmeenAli023/mamba_text_classification_sst | 1 | ["main"] | 1,519 |
| model | saeidebbei/Koon | 1 | ["main"] | 1,519 |
| model | thomaschang/LLama3_ADR_3500datatrained_MP | 1 | ["main"] | 32,121,110,199 |
| model | solidrust/Fugaku-LLM-13B-AWQ | 1 | ["main"] | 2,728 |
| model | DUAL-GPO/phi-2-gpo-v5-i1 | 1 | ["main"] | 171,286,447 |
| model | rohan-2810/imagecap_blip_long | 1 | ["main"] | 1,879,958,659 |
| model | saeidebbei/Beriztoush | 1 | ["main"] | 1,519 |
| model | zhoukz/llama-3-70b-instruct-4bit | 1 | ["main"] | 39,527,714,048 |
| model | TaroVN/NeoX-cost-0512-v1 | 1 | ["main"] | 171,342 |
| model | Malkith99/Llama-2-7b-absa-semeval-2014-restaurants | 1 | ["main"] | 13,479,328,242 |
| model | Rimyy/mistraftgsm1 | 1 | ["main"] | 14,485,819,828 |
| model | vaugheu/lora_model | 1 | ["main"] | 264,570,583 |
| model | seawolf2357/kollm7 | 1 | ["main"] | 58,973,788,193 |
| model | med-alex/uzn-roberta-base-ft-qa-ru-mt-to-uzn | 1 | ["main"] | 352,198,903 |
| model | euiyulsong/mistral-7b-qlora-arc-semi | 1 | ["main"] | 4,977,958,534 |
| model | chujiezheng/tulu-2-dpo-7b | 1 | ["main"] | 13,479,241,989 |
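
The listing above is a plain five-column table, so it can be post-processed with a few lines of standard-library Python. The sketch below is a minimal example, not part of the original export: the file name `model_branches.md`, the `RepoRow` type, and the `parse_rows` helper are all hypothetical. It parses the pipe-delimited rows (leaving the branches column unparsed) and reports the total main-branch size and how many repos carry more than one branch.

```python
# Minimal post-processing sketch. Assumptions (not part of the original
# export): the table above has been saved locally as "model_branches.md",
# and the names below are hypothetical.
from dataclasses import dataclass


@dataclass
class RepoRow:
    repo_type: str         # always "model" in this listing
    repo_id: str           # e.g. "kotokounity/JSaebi"
    num_branches: int
    main_branch_size: int  # reported size of the main branch, in bytes


def parse_rows(path: str) -> list[RepoRow]:
    """Parse pipe-delimited table rows; the branches column is left unparsed."""
    rows: list[RepoRow] = []
    with open(path, encoding="utf-8") as fh:
        for line in fh:
            cells = [c.strip() for c in line.strip().strip("|").split("|")]
            # Skip the header, the separator row, and anything malformed.
            if len(cells) != 5 or cells[0] != "model":
                continue
            rows.append(RepoRow(
                repo_type=cells[0],
                repo_id=cells[1],
                num_branches=int(cells[2]),
                main_branch_size=int(cells[4].replace(",", "")),
            ))
    return rows


if __name__ == "__main__":
    rows = parse_rows("model_branches.md")
    total = sum(r.main_branch_size for r in rows)
    multi = [r.repo_id for r in rows if r.num_branches > 1]
    print(f"{len(rows)} repos, {total:,} bytes on main branches in total")
    print(f"{len(multi)} repos carry more than one branch")
```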