{
"activation_function": "gelu_new",
"adaptation_rate": 0.05,
"additional_special_tokens": [
"<greeting>",
"<farewell>",
"<thank>",
"<apology>"
],
"architectures": [
"GPT2Model"
],
"attn_pdrop": 0.1,
"bos_token_id": -1,
"contextual_embedding_dim": 2048,
"device": "cuda",
"dropout_rate": 0.1,
"embd_pdrop": 0.1,
"embedding_dim": 2048,
"eos_token_id": -1,
"hidden_dim": 2048,
"initializer_range": 0.02,
"innovative_growth_capacity": 50000,
"integration_settings": {
"config_name": "config.json",
"load_from_transformers": true,
"pytorch_dump_folder_path": "./model_save",
"pytorch_model_bin_name": "pytorch_model.bin"
},
"layer_norm_epsilon": 1e-05,
"lstm_hidden_dim": 2048,
"max_memory_size": 100000,
"max_neurons": 100,
"meta_learning_rate": 0.001,
"model_type": "gpt2",
"n_embd": 768,
"n_head": 16,
"n_inner": null,
"n_layer": 24,
"n_positions": 2048,
"num_embeddings": 50268,
"num_heads": 64,
"num_layers": 24,
"output_attentions": true,
"output_hidden_states": true,
"pad_token_id": -100,
"reorder_and_upcast_attn": false,
"resid_pdrop": 0.1,
"scale_attn_by_inverse_layer_idx": false,
"scale_attn_weights": true,
"sep_token_id": -1,
"summary_activation": null,
"summary_first_dropout": 0.1,
"summary_proj_to_labels": true,
"summary_type": "cls_index",
"summary_use_proj": true,
"task_specific_params": {
"text-generation": {
"do_sample": true,
"length_penalty": 1.0,
"max_length": 50,
"no_repeat_ngram_size": 2,
"repetition_penalty": 1.2,
"temperature": 0.9,
"top_k": 50,
"top_p": 0.95
}
},
"return_dict": true,
"torchscript": false,
"torch_dtype": null,
"use_bfloat16": false,
"tf_legacy_loss": false,
"pruned_heads": {},
"tie_word_embeddings": true,
"chunk_size_feed_forward": 0,
"is_encoder_decoder": false,
"is_decoder": false,
"cross_attention_hidden_size": null,
"add_cross_attention": false,
"tie_encoder_decoder": false,
"max_length": 20,
"min_length": 0,
"do_sample": false,
"early_stopping": false,
"num_beams": 1,
"num_beam_groups": 1,
"diversity_penalty": 0.0,
"temperature": 1.0,
"top_k": 50,
"top_p": 1.0,
"typical_p": 1.0,
"repetition_penalty": 1.0,
"length_penalty": 1.0,
"no_repeat_ngram_size": 0,
"encoder_no_repeat_ngram_size": 0,
"bad_words_ids": null,
"num_return_sequences": 1,
"output_scores": false,
"return_dict_in_generate": false,
"forced_bos_token_id": null,
"forced_eos_token_id": null,
"remove_invalid_values": false,
"exponential_decay_length_penalty": null,
"suppress_tokens": null,
"begin_suppress_tokens": null,
"finetuning_task": null,
"id2label": {
"0": "LABEL_0",
"1": "LABEL_1"
},
"label2id": {
"LABEL_0": 0,
"LABEL_1": 1
},
"tokenizer_class": null,
"prefix": null,
"decoder_start_token_id": null,
"problem_type": null,
"transformers_version": null,
"vocab_size": 50281,
"context_window": 20,
"env": null,
"state_shape": null,
"action_size": 50257,
"q_model": null,
"target_q_model": null
}