Zhang Hui committed on
Commit · 637f9c3
Parent(s): ff8d687
add weights
This view is limited to 50 files because it contains too many changes. See raw diff.
- config.json +54 -0
- configuration_qwen.py +71 -0
- cpp_kernels.py +55 -0
- generation_config.json +10 -0
- model-00001-of-00085.safetensors +3 -0
- model-00002-of-00085.safetensors +3 -0
- model-00003-of-00085.safetensors +3 -0
- model-00004-of-00085.safetensors +3 -0
- model-00005-of-00085.safetensors +3 -0
- model-00006-of-00085.safetensors +3 -0
- model-00007-of-00085.safetensors +3 -0
- model-00008-of-00085.safetensors +3 -0
- model-00009-of-00085.safetensors +3 -0
- model-00010-of-00085.safetensors +3 -0
- model-00011-of-00085.safetensors +3 -0
- model-00012-of-00085.safetensors +3 -0
- model-00013-of-00085.safetensors +3 -0
- model-00014-of-00085.safetensors +3 -0
- model-00015-of-00085.safetensors +3 -0
- model-00016-of-00085.safetensors +3 -0
- model-00017-of-00085.safetensors +3 -0
- model-00018-of-00085.safetensors +3 -0
- model-00019-of-00085.safetensors +3 -0
- model-00020-of-00085.safetensors +3 -0
- model-00021-of-00085.safetensors +3 -0
- model-00022-of-00085.safetensors +3 -0
- model-00023-of-00085.safetensors +3 -0
- model-00024-of-00085.safetensors +3 -0
- model-00025-of-00085.safetensors +3 -0
- model-00026-of-00085.safetensors +3 -0
- model-00027-of-00085.safetensors +3 -0
- model-00028-of-00085.safetensors +3 -0
- model-00029-of-00085.safetensors +3 -0
- model-00030-of-00085.safetensors +3 -0
- model-00031-of-00085.safetensors +3 -0
- model-00032-of-00085.safetensors +3 -0
- model-00033-of-00085.safetensors +3 -0
- model-00034-of-00085.safetensors +3 -0
- model-00035-of-00085.safetensors +3 -0
- model-00036-of-00085.safetensors +3 -0
- model-00037-of-00085.safetensors +3 -0
- model-00038-of-00085.safetensors +3 -0
- model-00039-of-00085.safetensors +3 -0
- model-00040-of-00085.safetensors +3 -0
- model-00041-of-00085.safetensors +3 -0
- model-00042-of-00085.safetensors +3 -0
- model-00043-of-00085.safetensors +3 -0
- model-00044-of-00085.safetensors +3 -0
- model-00045-of-00085.safetensors +3 -0
- model-00046-of-00085.safetensors +3 -0
config.json
ADDED
@@ -0,0 +1,54 @@
+{
+  "_name_or_path": "/mnt/data/zhanghui/output-ckpts/lvlm/v5/finetune_lora_4/chat_2000",
+  "architectures": [
+    "QWenLMHeadModel"
+  ],
+  "attn_dropout_prob": 0.0,
+  "auto_map": {
+    "AutoConfig": "configuration_qwen.QWenConfig",
+    "AutoModelForCausalLM": "modeling_qwen.QWenLMHeadModel"
+  },
+  "bf16": true,
+  "emb_dropout_prob": 0.0,
+  "fp16": false,
+  "fp32": false,
+  "hidden_size": 8192,
+  "initializer_range": 0.02,
+  "intermediate_size": 49152,
+  "kv_channels": 128,
+  "layer_norm_epsilon": 1e-05,
+  "max_position_embeddings": 32768,
+  "model_type": "qwen",
+  "no_bias": true,
+  "num_attention_heads": 64,
+  "num_hidden_layers": 80,
+  "onnx_safe": null,
+  "padded_vocab_size": 152064,
+  "rope_theta": 1000000,
+  "rotary_emb_base": 1000000,
+  "rotary_pct": 1.0,
+  "scale_attn_weights": true,
+  "seq_length": 32768,
+  "softmax_in_fp32": false,
+  "tie_word_embeddings": false,
+  "tokenizer_type": "QWenTokenizer",
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.32.0",
+  "use_cache": false,
+  "use_cache_kernel": false,
+  "use_cache_quantization": false,
+  "use_dynamic_ntk": false,
+  "use_flash_attn": true,
+  "use_logn_attn": false,
+  "visual": {
+    "heads": 16,
+    "image_size": 448,
+    "image_start_id": 151857,
+    "layers": 48,
+    "mlp_ratio": 4.9231,
+    "output_dim": 8192,
+    "patch_size": 14,
+    "width": 1664
+  },
+  "vocab_size": 152064
+}
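The auto_map block above routes transformers' Auto classes to the custom code shipped with the weights. A minimal loading sketch, assuming the repository also contains modeling_qwen.py and the tokenizer files ("path/to/this/repo" is a placeholder, not the actual repo id):

import torch
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

repo = "path/to/this/repo"  # placeholder
config = AutoConfig.from_pretrained(repo, trust_remote_code=True)        # resolves configuration_qwen.QWenConfig via auto_map
tokenizer = AutoTokenizer.from_pretrained(repo, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    repo,
    torch_dtype=torch.bfloat16,   # matches "torch_dtype": "bfloat16" / "bf16": true
    device_map="auto",            # 85 bf16 shards; sharded loading (requires accelerate) is needed in practice
    trust_remote_code=True,       # the model class lives in modeling_qwen.py, not in transformers
)
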
configuration_qwen.py
ADDED
@@ -0,0 +1,71 @@
+# Copyright (c) Alibaba Cloud.
+#
+# This source code is licensed under the license found in the
+# LICENSE file in the root directory of this source tree.
+
+from transformers import PretrainedConfig
+
+
+class QWenConfig(PretrainedConfig):
+    model_type = "qwen"
+    keys_to_ignore_at_inference = ["past_key_values"]
+
+    def __init__(
+        self,
+        vocab_size=151936,
+        hidden_size=4096,
+        num_hidden_layers=32,
+        num_attention_heads=32,
+        emb_dropout_prob=0.0,
+        attn_dropout_prob=0.0,
+        layer_norm_epsilon=1e-6,
+        initializer_range=0.02,
+        max_position_embeddings=8192,
+        scale_attn_weights=True,
+        use_cache=True,
+        bf16=False,
+        fp16=False,
+        fp32=False,
+        kv_channels=128,
+        rotary_pct=1.0,
+        rotary_emb_base=10000,
+        use_dynamic_ntk=True,
+        use_logn_attn=True,
+        use_flash_attn="auto",
+        intermediate_size=22016,
+        no_bias=True,
+        tie_word_embeddings=False,
+        use_cache_quantization=False,
+        use_cache_kernel=False,
+        softmax_in_fp32=False,
+        **kwargs,
+    ):
+        self.vocab_size = vocab_size
+        self.hidden_size = hidden_size
+        self.intermediate_size = intermediate_size
+        self.num_hidden_layers = num_hidden_layers
+        self.num_attention_heads = num_attention_heads
+        self.emb_dropout_prob = emb_dropout_prob
+        self.attn_dropout_prob = attn_dropout_prob
+        self.layer_norm_epsilon = layer_norm_epsilon
+        self.initializer_range = initializer_range
+        self.scale_attn_weights = scale_attn_weights
+        self.use_cache = use_cache
+        self.max_position_embeddings = max_position_embeddings
+        self.bf16 = bf16
+        self.fp16 = fp16
+        self.fp32 = fp32
+        self.kv_channels = kv_channels
+        self.rotary_pct = rotary_pct
+        self.rotary_emb_base = rotary_emb_base
+        self.use_dynamic_ntk = use_dynamic_ntk
+        self.use_logn_attn = use_logn_attn
+        self.use_flash_attn = use_flash_attn
+        self.no_bias = no_bias
+        self.use_cache_quantization = use_cache_quantization
+        self.use_cache_kernel = use_cache_kernel
+        self.softmax_in_fp32 = softmax_in_fp32
+        super().__init__(
+            tie_word_embeddings=tie_word_embeddings,
+            **kwargs
+        )
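For reference, the defaults above are overridden per checkpoint by config.json; constructing the config directly with this checkpoint's values is equivalent (a sketch, not part of the committed files; keyword arguments that are not named parameters are stored as attributes by PretrainedConfig):

from configuration_qwen import QWenConfig

cfg = QWenConfig(
    vocab_size=152064,
    hidden_size=8192,
    num_hidden_layers=80,
    num_attention_heads=64,
    intermediate_size=49152,
    max_position_embeddings=32768,
    seq_length=32768,          # not a named parameter; kept via **kwargs
    rotary_emb_base=1000000,
    use_dynamic_ntk=False,
    use_logn_attn=False,
    use_flash_attn=True,
    bf16=True,
    use_cache=False,
)
print(cfg.hidden_size, cfg.num_hidden_layers)  # 8192 80
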
cpp_kernels.py
ADDED
@@ -0,0 +1,55 @@
+from torch.utils import cpp_extension
+import pathlib
+import os
+import subprocess
+
+def _get_cuda_bare_metal_version(cuda_dir):
+    raw_output = subprocess.check_output([cuda_dir + "/bin/nvcc", "-V"],
+                                         universal_newlines=True)
+    output = raw_output.split()
+    release_idx = output.index("release") + 1
+    release = output[release_idx].split(".")
+    bare_metal_major = release[0]
+    bare_metal_minor = release[1][0]
+
+    return raw_output, bare_metal_major, bare_metal_minor
+
+def _create_build_dir(buildpath):
+    try:
+        os.mkdir(buildpath)
+    except OSError:
+        if not os.path.isdir(buildpath):
+            print(f"Creation of the build directory {buildpath} failed")
+
+# Check if cuda 11 is installed for compute capability 8.0
+cc_flag = []
+_, bare_metal_major, bare_metal_minor = _get_cuda_bare_metal_version(cpp_extension.CUDA_HOME)
+if int(bare_metal_major) >= 11:
+    cc_flag.append('-gencode')
+    cc_flag.append('arch=compute_80,code=sm_80')
+    if int(bare_metal_minor) >= 7:
+        cc_flag.append('-gencode')
+        cc_flag.append('arch=compute_90,code=sm_90')
+
+# Build path
+srcpath = pathlib.Path(__file__).parent.absolute()
+buildpath = srcpath / 'build'
+_create_build_dir(buildpath)
+
+def _cpp_extention_load_helper(name, sources, extra_cuda_flags):
+    return cpp_extension.load(
+        name=name,
+        sources=sources,
+        build_directory=buildpath,
+        extra_cflags=['-O3', ],
+        extra_cuda_cflags=['-O3',
+                           '-gencode', 'arch=compute_70,code=sm_70',
+                           '--use_fast_math'] + extra_cuda_flags + cc_flag,
+        verbose=1
+    )
+
+extra_flags = []
+
+cache_autogptq_cuda_256_sources = ["./cache_autogptq_cuda_256.cpp",
+                                   "./cache_autogptq_cuda_kernel_256.cu"]
+cache_autogptq_cuda_256 = _cpp_extention_load_helper("cache_autogptq_cuda_256", cache_autogptq_cuda_256_sources, extra_flags)
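Note that cpp_kernels.py JIT-compiles the cache_autogptq_cuda_256 extension at import time, so it needs nvcc and a writable ./build directory; with use_cache_kernel and use_cache_quantization both false in config.json, the kernel is not exercised for this checkpoint. A small pre-check sketch before importing it (an assumption about usage, not part of the commit):

import os
from torch.utils import cpp_extension

# The import below triggers the CUDA build; skip it when the CUDA toolkit is unavailable.
nvcc = os.path.join(cpp_extension.CUDA_HOME or "", "bin", "nvcc")
if cpp_extension.CUDA_HOME and os.path.exists(nvcc):
    import cpp_kernels  # builds cache_autogptq_cuda_256 into ./build
else:
    print("CUDA toolkit not found; skipping cpp_kernels (only needed for quantized-cache kernels)")
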
generation_config.json
ADDED
@@ -0,0 +1,10 @@
+{
+  "chat_format": "chatml",
+  "eos_token_id": 151643,
+  "max_new_tokens": 512,
+  "max_window_size": 6144,
+  "pad_token_id": 151643,
+  "repetition_penalty": 1.1,
+  "transformers_version": "4.32.0",
+  "trust_remote_code": true
+}
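generate() picks these values up automatically as the model's default generation config; individual settings can still be overridden per call (a sketch, assuming the model and tokenizer were loaded as in the earlier snippet):

inputs = tokenizer("Hello", return_tensors="pt").to(model.device)
output_ids = model.generate(
    **inputs,
    max_new_tokens=256,       # overrides the checkpoint default of 512 for this call
    repetition_penalty=1.1,   # same value as the checkpoint default
)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
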
model-00001-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:71f121603f83009d633cfc65772d2e9f3d321e1aeb9c0afbf775a82b5f1d2f30
+size 2491416712
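This entry and the remaining model-XXXXX-of-00085.safetensors entries are Git LFS pointer files (version, oid, size), not the tensors themselves. A sketch of fetching the actual shards with huggingface_hub ("user/repo" is a placeholder for this repository's id):

from huggingface_hub import snapshot_download

local_dir = snapshot_download(
    repo_id="user/repo",  # placeholder
    allow_patterns=["*.safetensors", "*.json", "*.py"],  # weights, configs, remote code
)
print(local_dir)
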
model-00002-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7c938d6f731a351971cc8ebe563dd2fdad35799c20e257d9278dada505f4f684
+size 1744929752

model-00003-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:170df483deca86bc9258dd8e050f575719914cb7825144dae298e9bdf9fbaeeb
+size 1744913264

model-00004-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e8e2355a620a2ef5488d030c3b67963f9a3ef89e1aaf92e5b0df295a262921c1
+size 1744913264

model-00005-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d9ebd74a41b672c5c1fd70c694e635b1425c2060de53c8bda4514e9fe3cd03ff
+size 1744913264

model-00006-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:878d21fa47d7568624d423bb3974f8f7c8e54605dbf630ad2ed5bd45c67143df
+size 1744913264

model-00007-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b03743695ced8738aeaff1bd94e99af52d1b9b982d9bf5af9709b9779f32d6f7
+size 1744913264

model-00008-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ff2f43baf5d0e747bf2e10369f047122f51257e542a4d00e47c601790c2d9119
+size 1744913264

model-00009-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c9eda83888e3d72da1aa426c8626f09c637bdbf445fca3d849d1fb07e5fea2c5
+size 1744913264

model-00010-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aac21787776fd4d797403a06bb4ad22d919f7718150d6f35bdd5a001620fc9f4
+size 1744913264

model-00011-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:337949d4dd774da92830ed242d10e199a84df6448c0d8eabf6eb82c696d78e7b
+size 1744913256

model-00012-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8cfba972f15e1f7198280c7fbaa3e76de4e9f8b3ac30ceb653d48095afdfdae7
+size 1744913272

model-00013-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:627855f60accd2abf9b454f28b856a45ebcc9a103aa9d5974d2e35687c317494
+size 1744913272

model-00014-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ba5a8fc5a9cc6f1d7fdd3e3803316ecf464dce0c7fd089ed7e2ad63c55741031
+size 1744913272

model-00015-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f8c941159d28fec11d5c15345c1e3fbfcf0c227ab125e8de72a721bdfc61103e
+size 1744913272

model-00016-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:190a4b0033e940d59c7c1edea6ee14427cb33bfca6303a928fbaa1cfeb5cd462
+size 1744913272

model-00017-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:913de58c7c2f98f0ffa55645fe9aa14734e4579c07c605946ec83fe0201a930a
+size 1744913272

model-00018-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:53f1d07f2168c07e8c7bcc6796cec0d12ba8a121f42e7a300658bfe74fb4c76a
+size 1744913272

model-00019-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4877b2da422241b797e95e9a76dc77e34db7c83936b441839900bcc86f85a676
+size 1744913272

model-00020-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:14d6134c14eea051a34d8ce0887ffb1ddd8b95828eb7c031020a6ca73116f177
+size 1744913272

model-00021-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0c43ffcd0c87bbd93be0c8d1e4f6c6d5babb2210eebd48b716de7cec76a9b3a4
+size 1744913272

model-00022-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5eaf3ec86973720a894216bc4d12dab9950561439db224651a1f9f90f6bf4a20
+size 1744913272

model-00023-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:321f5d668f2c3c22dde40fd042482ba8d736aee99b2ff64376142e29a61b2365
+size 1744913272

model-00024-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4a450e78a1c43f2d4466f6800db402a5cda2d1acc152c10b4cb39122ee37ff70
+size 1744913272

model-00025-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:15ecd2c3af23c2107d287f1759faccf43efefb4dbc08ffc519718cd76cd9ea70
+size 1744913272

model-00026-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d26b247ab89901ed99831c587c17dcbb9a50491284eed68906e096070975b382
+size 1744913272

model-00027-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:83fe7e3417964a71d867b2261941e9e334db201e8d2d211fa379199044421298
+size 1744913272

model-00028-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2d0d65934d9cbd0995b6b992730874e7a956485efbff5e6562cb3f42c945538d
+size 1744913272

model-00029-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9474129cc10ea799035ce3d3beeb6765327d248a8758b25a0246dc8f6459aa6e
+size 1744913272

model-00030-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:11eacb6a5e584f331bd3bdfdf074c2f3adb850c836e9021f8e790355e5e205ac
+size 1744913272

model-00031-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:585b36679c9148589003569a92fd03249e8c69351438e2ac62e5ce70384957a0
+size 1744913272

model-00032-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1b55964c68bd803e7b2c5127aa754a70e075c7938361f9c6191481f5db742e41
+size 1744913272

model-00033-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0d27651706f4cb9248a5b71c6bf8cd8110f992c99a9d6aef5e7bf3f1988a055a
+size 1744913272

model-00034-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bd7bca3c8355e004d7873e5a896ad6d08a4d591c113912fd2857d238c6f3cada
+size 1744913272

model-00035-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:764940ef12cd48f6c5d1d95e539a496a75123c37ac5475ef92229437f5f4b65e
+size 1744913272

model-00036-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b88df706737908d8fd93feb496b5be6dd1e117ce27a720dae48d855e8b80aef
+size 1744913272

model-00037-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:61e0fcbc5d859a20b223cf688023198b62b1c92b2a28bc30f6c3d4a353bae478
+size 1744913272

model-00038-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:56c67d6cc60e5ffccbc0e12e3ad65acfe27d2b7e82a9d68d5225fbea6563eb77
+size 1744913272

model-00039-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f4fc89e70e3c2858117acf78cf559cfe05ac2ae89a26bb91eabe1fffc1b88ac2
+size 1744913272

model-00040-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:45996a01e7a47b7aefe829e0bc6de00b6611f6b1046d24000e9a68dfb3a9a08c
+size 1744913272

model-00041-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:56141501d961a1041b96c9016302c780a7ac0e1323f7ada19d55411563c9cc06
+size 1744913272

model-00042-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e3b72b80df7b54bd49af8ee0f8e9db0131b8a68a602e817596f2da32128e9495
+size 1744913272

model-00043-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3011d4e557ff64d0f26f6b4edd7d4f9215aec4cc1dbd5165f333d44812285e83
+size 1744913272

model-00044-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:714d040ecb09b72a667d295b21c7d65f5ffa971923299a63fe636648d8f26df1
+size 1744913272

model-00045-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fec9c56cbc8c33308792dbd2370cc3727a8516f3d105cacb94f40c87637dadc7
+size 1744913272

model-00046-of-00085.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d9404286c6de730b52f66dd558cb42178f3628be658425c02e3d9834cbab9500
+size 1744913272