Aryanne committed on
Commit
f5f361d
1 Parent(s): 29e0dd1

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +4 -4
config.json CHANGED
@@ -1,11 +1,11 @@
1
  {
2
- "_name_or_path": "jondurbin/airoboros-3b-3p0",
3
  "architectures": [
4
  "StableLMEpochForCausalLM"
5
  ],
6
  "auto_map": {
7
- "AutoConfig": "jondurbin/airoboros-3b-3p0--configuration_stablelm_epoch.StableLMEpochConfig",
8
- "AutoModelForCausalLM": "jondurbin/airoboros-3b-3p0--modeling_stablelm_epoch.StableLMEpochForCausalLM"
9
  },
10
  "bos_token_id": 0,
11
  "eos_token_id": 0,
@@ -24,7 +24,7 @@
24
  "rope_theta": 10000,
25
  "rotary_scaling_factor": 1.0,
26
  "tie_word_embeddings": false,
27
- "torch_dtype": "float32",
28
  "transformers_version": "4.35.2",
29
  "use_cache": false,
30
  "vocab_size": 50304
 
1
  {
2
+ "_name_or_path": "Aryanne/Astridboros-3B",
3
  "architectures": [
4
  "StableLMEpochForCausalLM"
5
  ],
6
  "auto_map": {
7
+ "AutoConfig": "configuration_stablelm_epoch.StableLMEpochConfig",
8
+ "AutoModelForCausalLM": "modeling_stablelm_epoch.StableLMEpochForCausalLM"
9
  },
10
  "bos_token_id": 0,
11
  "eos_token_id": 0,
 
24
  "rope_theta": 10000,
25
  "rotary_scaling_factor": 1.0,
26
  "tie_word_embeddings": false,
27
+ "torch_dtype": "float16",
28
  "transformers_version": "4.35.2",
29
  "use_cache": false,
30
  "vocab_size": 50304