nicocambrian committed
Commit 39e1ad6 · verified · 1 Parent(s): 93d497b

Upload folder using huggingface_hub

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "llava-hf/llava-v1.6-34b-hf",
+  "_name_or_path": "/root/.cache/huggingface/hub/models--llava-hf--llava-v1.6-34b-hf/snapshots/66b6feb83d0249dc9f31a24bd3abfb63f90e41aa",
   "architectures": [
     "LlavaNextForConditionalGeneration"
   ],
@@ -30,13 +30,8 @@
   "model_type": "llava_next",
   "projector_hidden_act": "gelu",
   "quantization_config": {
-    "backend": "autoawq",
     "bits": 4,
-    "do_fuse": false,
-    "exllama_config": null,
-    "fuse_max_seq_len": null,
     "group_size": 128,
-    "modules_to_fuse": null,
     "modules_to_not_convert": null,
     "quant_method": "awq",
     "version": "gemm",
@@ -63,7 +58,7 @@
     "vocab_size": 64064
   },
   "tie_word_embeddings": false,
-  "torch_dtype": "float16",
+  "torch_dtype": "bfloat16",
   "transformers_version": "4.41.1",
   "use_image_newline_parameter": true,
   "vision_config": {
generation_config.json ADDED
@@ -0,0 +1,9 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 1,
+  "do_sample": true,
+  "eos_token_id": 7,
+  "pad_token_id": 0,
+  "transformers_version": "4.41.1",
+  "use_cache": false
+}
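
These values are the defaults that generate() picks up unless overridden per call. A small inspection sketch, again with a placeholder repository id:

    # Sketch: inspect the generation defaults recorded in the file above.
    from transformers import GenerationConfig

    gen_cfg = GenerationConfig.from_pretrained("path/to/this/repo")  # placeholder repo id
    print(gen_cfg.do_sample, gen_cfg.eos_token_id, gen_cfg.use_cache)  # True 7 False
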
model-00001-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b2aed3b19835afc58f01e51474c00dd4b0eb84e12259d10cd734947e7578775f
+size 4968588192

model-00002-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:25ef60b1167b9d7b8118dac6871766a61c349e043aeeb03cff0ca223089535e8
+size 4927419744

model-00003-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5a68886a2d629cdffdfd541c3448e2ff14d5e725ab2b6a3c516fc98a142dae46
+size 4927419744

model-00004-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0b962f064abc068d03fabf9822022d5f9f76279b16e3f75e85a49c807beff7ad
+size 4210454256

model-00005-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f22985919b00b953dfcbc1eaa62743797554481b485eea2c763a98cac5e8f247
+size 918421648
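
Each shard entry above is a Git LFS pointer: the weights themselves live in LFS storage and are referenced by their sha256 oid and byte size. A sketch for checking a downloaded shard against its pointer (the local file path is an assumption):

    # Sketch: verify a downloaded shard against the sha256 oid from its LFS pointer.
    import hashlib

    def sha256_of(path, chunk_size=1 << 20):
        h = hashlib.sha256()
        with open(path, "rb") as f:
            while chunk := f.read(chunk_size):
                h.update(chunk)
        return h.hexdigest()

    expected = "b2aed3b19835afc58f01e51474c00dd4b0eb84e12259d10cd734947e7578775f"
    assert sha256_of("model-00001-of-00005.safetensors") == expected  # assumed local path
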
model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
 
preprocessor_config.json ADDED
@@ -0,0 +1,52 @@
+{
+  "aspect_ratio_setting": "anyres",
+  "crop_size": {
+    "height": 336,
+    "width": 336
+  },
+  "do_center_crop": true,
+  "do_convert_rgb": true,
+  "do_normalize": true,
+  "do_pad": true,
+  "do_rescale": true,
+  "do_resize": true,
+  "image_grid_pinpoints": [
+    [
+      336,
+      672
+    ],
+    [
+      672,
+      336
+    ],
+    [
+      672,
+      672
+    ],
+    [
+      1008,
+      336
+    ],
+    [
+      336,
+      1008
+    ]
+  ],
+  "image_mean": [
+    0.48145466,
+    0.4578275,
+    0.40821073
+  ],
+  "image_processor_type": "LlavaNextImageProcessor",
+  "image_std": [
+    0.26862954,
+    0.26130258,
+    0.27577711
+  ],
+  "processor_class": "LlavaNextProcessor",
+  "resample": 3,
+  "rescale_factor": 0.00392156862745098,
+  "size": {
+    "shortest_edge": 336
+  }
+}
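
With aspect_ratio_setting "anyres", LlavaNextImageProcessor tiles each image into 336x336 crops chosen from image_grid_pinpoints (plus a base crop) and normalizes with the mean/std listed above. A preprocessing sketch, assuming a placeholder repository id and a chat-style prompt template that is not taken from this commit:

    # Sketch: exercise the "anyres" preprocessing path with a dummy image.
    from PIL import Image
    from transformers import LlavaNextProcessor

    processor = LlavaNextProcessor.from_pretrained("path/to/this/repo")  # placeholder repo id
    image = Image.new("RGB", (800, 600))  # dummy image; any PIL image works
    prompt = "<|im_start|>user\n<image>\nDescribe the image.<|im_end|><|im_start|>assistant\n"  # assumed template
    inputs = processor(images=image, text=prompt, return_tensors="pt")
    # pixel_values is (1, num_patches, 3, 336, 336): one 336x336 base crop plus the
    # tiles of the grid selected from image_grid_pinpoints.
    print(inputs["pixel_values"].shape)
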