Gengzigang committed on
Commit b155c46
1 Parent(s): a3b67b2
Files changed (1)
  1. config.json +0 -80
config.json CHANGED
@@ -12,86 +12,6 @@
   "logit_scale_init_value": 2.659260036932778,
   "model_type": "clip",
   "projection_dim": 1280,
-  "text_config": {
-    "_name_or_path": "",
-    "add_cross_attention": false,
-    "architectures": null,
-    "attention_dropout": 0.0,
-    "bad_words_ids": null,
-    "begin_suppress_tokens": null,
-    "bos_token_id": 0,
-    "chunk_size_feed_forward": 0,
-    "cross_attention_hidden_size": null,
-    "decoder_start_token_id": null,
-    "diversity_penalty": 0.0,
-    "do_sample": false,
-    "early_stopping": false,
-    "encoder_no_repeat_ngram_size": 0,
-    "eos_token_id": 2,
-    "exponential_decay_length_penalty": null,
-    "finetuning_task": null,
-    "forced_bos_token_id": null,
-    "forced_eos_token_id": null,
-    "hidden_act": "gelu",
-    "hidden_size": 512,
-    "id2label": {
-      "0": "LABEL_0",
-      "1": "LABEL_1"
-    },
-    "initializer_factor": 1.0,
-    "initializer_range": 0.02,
-    "intermediate_size": 2048,
-    "is_decoder": false,
-    "is_encoder_decoder": false,
-    "k_bias": true,
-    "label2id": {
-      "LABEL_0": 0,
-      "LABEL_1": 1
-    },
-    "layer_norm_eps": 1e-05,
-    "length_penalty": 1.0,
-    "max_length": 20,
-    "max_position_embeddings": 77,
-    "min_length": 0,
-    "model_type": "clip_text_model",
-    "no_repeat_ngram_size": 0,
-    "num_attention_heads": 8,
-    "num_beam_groups": 1,
-    "num_beams": 1,
-    "num_hidden_layers": 12,
-    "num_return_sequences": 1,
-    "output_attentions": false,
-    "output_hidden_states": false,
-    "output_scores": false,
-    "pad_token_id": 1,
-    "post_layernorm": false,
-    "prefix": null,
-    "problem_type": null,
-    "projection_dim": 512,
-    "pruned_heads": {},
-    "q_bias": true,
-    "remove_invalid_values": false,
-    "repetition_penalty": 1.0,
-    "return_dict": true,
-    "return_dict_in_generate": false,
-    "sep_token_id": null,
-    "suppress_tokens": null,
-    "task_specific_params": null,
-    "temperature": 1.0,
-    "tf_legacy_loss": false,
-    "tie_encoder_decoder": false,
-    "tie_word_embeddings": true,
-    "tokenizer_class": null,
-    "top_k": 50,
-    "top_p": 1.0,
-    "torch_dtype": null,
-    "torchscript": false,
-    "transformers_version": "4.44.2",
-    "typical_p": 1.0,
-    "use_bfloat16": false,
-    "v_bias": true,
-    "vocab_size": 49408
-  },
   "torch_dtype": "float32",
   "transformers_version": null,
   "vision_config": {