Sakalti committed
Commit 59e8362 · verified · 1 Parent(s): 396f847

Update app.py

Files changed (1):
  1. app.py +7 -4
app.py CHANGED
@@ -1,7 +1,7 @@
  import gradio as gr
  import requests
  import torch
- from transformers import AutoModel
+ from transformers import AutoModel, AutoConfig
  from huggingface_hub import HfApi
  import safetensors
  import os
@@ -29,13 +29,16 @@ def convert_and_deploy(url, repo_id, hf_token):
  
      # Load the model
      try:
+         # Get the model configuration
+         config = AutoConfig.from_pretrained("Sakalti/Qwen-float16-0.5b", token=hf_token)
+ 
+         # Initialize the model from the configuration
+         model = AutoModel.from_config(config, torch_dtype=torch.float16)
+ 
          # Load the model state from the safetensors file
          with safetensors.safe_open(file_path, framework="pt") as f:
              state_dict = {k: f.get_tensor(k) for k in f.keys()}
  
-         # Initialize the model
-         model = AutoModel.from_config(AutoModel.from_pretrained("Sakalti/Qwen-float16-0.5b", torch_dtype=torch.float16, token=hf_token).config)
- 
          # Convert BF16 to FP16
          state_dict = {k: v.to(torch.float16) for k, v in state_dict.items()}
  
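
The hunk above only changes how the model skeleton is built and how the weights are cast. For context, below is a minimal, hypothetical sketch of the "deploy" half that a convert_and_deploy function like this typically needs: writing the FP16 state dict back to a safetensors file and uploading it to the target repo with HfApi. None of this is taken from the committed file; the helper name deploy_fp16_state_dict, the output file name model.safetensors, and the repo layout are assumptions for illustration.

from safetensors.torch import save_file
from huggingface_hub import HfApi

def deploy_fp16_state_dict(state_dict, repo_id, hf_token, out_path="model.safetensors"):
    # Assumption: state_dict already holds FP16 CPU tensors, as produced in the diff above.
    # Write the converted tensors to a safetensors file on disk.
    save_file(state_dict, out_path)

    # Create the target repo if it does not exist yet, then upload the weights file.
    api = HfApi(token=hf_token)
    api.create_repo(repo_id=repo_id, exist_ok=True)
    api.upload_file(
        path_or_fileobj=out_path,
        path_in_repo="model.safetensors",
        repo_id=repo_id,
    )
    return f"Uploaded FP16 weights to {repo_id}"

One note on the change itself: AutoConfig.from_pretrained only fetches the small config.json, so building the model with AutoModel.from_config avoids the earlier pattern of downloading the entire Sakalti/Qwen-float16-0.5b checkpoint just to reuse its config.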