phillnet / config.json
{
  "activation_function": "gelu_new",
  "architectures": [
    "GPT2LMHeadModel"
  ],
  "complexity_metric": null,
  "desired_improvement_rate": 0.02,
  "ecosystem_dynamics": {
    "environmental_volatility": 0.1,
    "resource_pool": 1.0
  },
  "embedding_dim": 768,
  "growth_improvement_threshold": 0.01,
  "hidden_dim": 2048,
  "initial_neuron_count": 5000,
  "innovative_growth_net": {
    "adaptation_rate": 0.05,
    "complexity_metric": null,
    "initial_capacity": 250000,
    "input_size": 2048
  },
  "input_dimension": 768,
  "low_stability_threshold": 0.01,
  "max_complexity": 10000,
  "max_neurons": 250000,
  "max_sequence_length": 200,
  "min_epochs_before_growth": 5,
  "model_filename": "pytorch_model.bin",
"model_type": "llama",
"num_embeddings": 25000,
"pruning_improvement_threshold": 0.005,
"some_adaptation_rate": 0.05,
"stability_threshold": 0.02,
"start_token_index": 2,
"transformers_version": "4.34.0"
}
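
A minimal sketch of loading this file with the transformers library (4.34.0, per transformers_version above). It assumes the JSON is saved locally as config.json; GPT2Config matches the GPT2LMHeadModel architecture declared here, and the custom keys (ecosystem_dynamics, innovative_growth_net, max_neurons, and so on) are not part of the standard GPT-2 config, so they are kept as plain extra attributes on the config object.

# Sketch: load config.json and inspect both standard and custom fields.
# Assumes transformers >= 4.34.0 is installed and that config.json
# sits in the current working directory.
from transformers import GPT2Config

config = GPT2Config.from_json_file("config.json")

# Standard fields land on the config as usual...
print(config.architectures)       # ['GPT2LMHeadModel']

# ...while keys GPT2Config does not define are stored as extra attributes.
print(config.ecosystem_dynamics)  # {'environmental_volatility': 0.1, 'resource_pool': 1.0}
print(config.max_neurons)         # 250000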