czczup committed on
Commit
4a03a6f
1 Parent(s): ffe5d3e

Upload folder using huggingface_hub

Browse files
Files changed (2) hide show
  1. README.md +4 -0
  2. configuration_intern_vit.py +1 -1
README.md CHANGED
@@ -144,6 +144,7 @@ model = AutoModel.from_pretrained(
144
  path,
145
  torch_dtype=torch.bfloat16,
146
  low_cpu_mem_usage=True,
 
147
  trust_remote_code=True).eval().cuda()
148
  ```
149
 
@@ -158,6 +159,7 @@ model = AutoModel.from_pretrained(
158
  torch_dtype=torch.bfloat16,
159
  load_in_8bit=True,
160
  low_cpu_mem_usage=True,
 
161
  trust_remote_code=True).eval()
162
  ```
163
 
@@ -206,6 +208,7 @@ model = AutoModel.from_pretrained(
206
  path,
207
  torch_dtype=torch.bfloat16,
208
  low_cpu_mem_usage=True,
 
209
  trust_remote_code=True,
210
  device_map=device_map).eval()
211
  ```
@@ -302,6 +305,7 @@ model = AutoModel.from_pretrained(
302
  path,
303
  torch_dtype=torch.bfloat16,
304
  low_cpu_mem_usage=True,
 
305
  trust_remote_code=True).eval().cuda()
306
  tokenizer = AutoTokenizer.from_pretrained(path, trust_remote_code=True, use_fast=False)
307
 
 
144
  path,
145
  torch_dtype=torch.bfloat16,
146
  low_cpu_mem_usage=True,
147
+ use_flash_attn=False,
148
  trust_remote_code=True).eval().cuda()
149
  ```
150
 
 
159
  torch_dtype=torch.bfloat16,
160
  load_in_8bit=True,
161
  low_cpu_mem_usage=True,
162
+ use_flash_attn=False,
163
  trust_remote_code=True).eval()
164
  ```
165
 
 
208
  path,
209
  torch_dtype=torch.bfloat16,
210
  low_cpu_mem_usage=True,
211
+ use_flash_attn=False,
212
  trust_remote_code=True,
213
  device_map=device_map).eval()
214
  ```
 
305
  path,
306
  torch_dtype=torch.bfloat16,
307
  low_cpu_mem_usage=True,
308
+ use_flash_attn=False,
309
  trust_remote_code=True).eval().cuda()
310
  tokenizer = AutoTokenizer.from_pretrained(path, trust_remote_code=True, use_fast=False)
311
 
configuration_intern_vit.py CHANGED
@@ -71,7 +71,7 @@ class InternVisionConfig(PretrainedConfig):
71
  intermediate_size=12800,
72
  qk_normalization=True,
73
  num_hidden_layers=48,
74
- use_flash_attn=True,
75
  hidden_act='gelu',
76
  norm_type='rms_norm',
77
  layer_norm_eps=1e-6,
 
71
  intermediate_size=12800,
72
  qk_normalization=True,
73
  num_hidden_layers=48,
74
+ use_flash_attn=False,
75
  hidden_act='gelu',
76
  norm_type='rms_norm',
77
  layer_norm_eps=1e-6,