{
    "module": "keras_hub.src.models.mistral.mistral_causal_lm",
    "class_name": "MistralCausalLM",
    "config": {
        "backbone": {
            "module": "keras_hub.src.models.mistral.mistral_backbone",
            "class_name": "MistralBackbone",
            "config": {
                "name": "mistral_backbone_1",
                "trainable": true,
                "vocabulary_size": 32000,
                "num_layers": 32,
                "num_query_heads": 32,
                "hidden_dim": 4096,
                "intermediate_dim": 14336,
                "rope_max_wavelength": 10000.0,
                "rope_scaling_factor": 1.0,
                "num_key_value_heads": 8,
                "sliding_window": 4096,
                "layer_norm_epsilon": 1e-05,
                "dropout": 0
            },
            "registered_name": "keras_hub>MistralBackbone"
        },
        "preprocessor": {
            "module": "keras_hub.src.models.mistral.mistral_causal_lm_preprocessor",
            "class_name": "MistralCausalLMPreprocessor",
            "config": {
                "name": "mistral_causal_lm_preprocessor",
                "trainable": true,
                "dtype": {
                    "module": "keras",
                    "class_name": "DTypePolicy",
                    "config": {
                        "name": "bfloat16"
                    },
                    "registered_name": null
                },
                "tokenizer": {
                    "module": "keras_hub.src.models.mistral.mistral_tokenizer",
                    "class_name": "MistralTokenizer",
                    "config": {
                        "name": "mistral_tokenizer",
                        "trainable": true,
                        "dtype": {
                            "module": "keras",
                            "class_name": "DTypePolicy",
                            "config": {
                                "name": "int32"
                            },
                            "registered_name": null
                        },
                        "config_file": "tokenizer.json",
                        "proto": null,
                        "sequence_length": null,
                        "add_bos": false,
                        "add_eos": false
                    },
                    "registered_name": "keras_hub>MistralTokenizer"
                },
                "config_file": "preprocessor.json",
                "sequence_length": 1024,
                "add_start_token": true,
                "add_end_token": true
            },
            "registered_name": "keras_hub>MistralCausalLMPreprocessor"
        },
        "name": "mistral_causal_lm"
    },
    "registered_name": "keras_hub>MistralCausalLM"
}
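
The JSON above is a Keras serialization of the full MistralCausalLM task (backbone plus preprocessor plus tokenizer), not a weights file. Below is a minimal sketch of how such a config could be rehydrated with Keras 3's generic deserializer; the local file name `mistral_causal_lm.json` is an assumption, and the resulting model gets freshly initialized weights, since this JSON carries no parameters and the tokenizer's vocabulary assets (`"proto": null`) live in separate files.

```python
import json

import keras
import keras_hub  # importing registers the "keras_hub>..." classes named in the config

# Assumed local copy of the JSON shown above; the file name is illustrative.
with open("mistral_causal_lm.json") as f:
    config = json.load(f)

# Rebuild the architecture described by the config: a 32-layer Mistral
# backbone with grouped-query attention (32 query heads, 8 KV heads) and a
# 4096-token sliding window. Weights are randomly initialized.
model = keras.saving.deserialize_keras_object(config)
model.summary()
```

In practice the architecture and its weights are usually obtained in one step with `keras_hub.models.MistralCausalLM.from_preset(...)`, pointing at the preset directory or hub handle that contains this JSON alongside the weight and tokenizer assets.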