Aekanun committed
Commit 00af854 · 1 Parent(s): 763f819
Files changed (1)
  1. app.py +19 -8
app.py CHANGED
@@ -108,16 +108,16 @@ def load_model():
     from transformers import AutoModelForVision2Seq, AutoTokenizer
     import torch
 
-    print("Loading tokenizer...")
+    # print("Loading tokenizer...")
+    # # tokenizer = AutoTokenizer.from_pretrained(
+    # #     "unsloth/Llama-3.2-11B-Vision-Instruct",
+    # #     trust_remote_code=True
+    # # )
     # tokenizer = AutoTokenizer.from_pretrained(
-    #     "unsloth/Llama-3.2-11B-Vision-Instruct",
-    #     trust_remote_code=True
+    #     "meta-llama/Llama-3.2-11B-Vision-Instruct",
+    #     trust_remote_code=True,
+    #     use_auth_token=True
     # )
-    tokenizer = AutoTokenizer.from_pretrained(
-        "meta-llama/Llama-3.2-11B-Vision-Instruct",
-        trust_remote_code=True,
-        use_auth_token=True
-    )
 
     print(f"2. Tokenizer type: {type(tokenizer)}")
     print(f"3. Methods available in tokenizer: {dir(tokenizer)}")
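Review note (not part of the commit): the gated meta-llama tokenizer load above is only commented out, not deleted. If it is re-enabled later, recent transformers releases accept token= in place of the deprecated use_auth_token=; a minimal sketch, where reading the token from an HF_TOKEN environment variable is an assumption:

# Sketch only: re-enabling the gated tokenizer load with the newer `token` argument.
# HF_TOKEN is an assumed environment variable; token=True falls back to the cached CLI login.
import os
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(
    "meta-llama/Llama-3.2-11B-Vision-Instruct",
    trust_remote_code=True,
    token=os.environ.get("HF_TOKEN", True),
)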
@@ -146,6 +146,17 @@ def load_model():
 def process_image(image):
     global model, tokenizer
 
+    ### Load the tokenizer from the base model
+    from unsloth import FastVisionModel
+    from transformers import AutoTokenizer
+    print("Loading tokenizer...")
+    base_model, tokenizer = FastVisionModel.from_pretrained(
+        "unsloth/Llama-3.2-11B-Vision-Instruct",
+        use_gradient_checkpointing = "unsloth",
+        ### device_map="auto"  ### add this here
+    )
+    ###
+
     print("Type of model:", type(model))
     print("\nIn process_image():")
     print("A. Type of tokenizer:", type(tokenizer))
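Review note (not part of the commit): because the FastVisionModel.from_pretrained call now sits inside process_image(), the 11B base model and tokenizer are re-initialized on every image. A minimal sketch of a cached alternative, using the same unsloth call shown in the diff; the _TOKENIZER cache and get_tokenizer() helper are hypothetical names, not part of app.py:

# Sketch only: load the base tokenizer once and reuse it across process_image() calls.
from unsloth import FastVisionModel

_TOKENIZER = None  # hypothetical module-level cache

def get_tokenizer():
    """Return the shared tokenizer, loading the base model only on first use."""
    global _TOKENIZER
    if _TOKENIZER is None:
        # Same call as in the diff above; FastVisionModel.from_pretrained
        # returns a (model, tokenizer) pair.
        _base_model, _TOKENIZER = FastVisionModel.from_pretrained(
            "unsloth/Llama-3.2-11B-Vision-Instruct",
            use_gradient_checkpointing="unsloth",
        )
    return _TOKENIZER

process_image() could then call get_tokenizer() instead of reloading the model on every request.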