Malum0x committed on
Commit
617e5e5
·
1 Parent(s): b7610c9

fix: hf adjustments

Browse files
Files changed (1) hide show
  1. app.py +105 -26
app.py CHANGED
@@ -5,73 +5,152 @@ import tempfile
5
  import os
6
  import requests
7
  import json
 
 
 
 
 
8
 
9
def inspect_model(model_id, config_file=None):
    """Download a model's safetensors file from the Hugging Face Hub and summarize it.

    Args:
        model_id: Repository id in the form "username/modelname".
        config_file: Optional uploaded config.json file object; when omitted,
            the repo's config.json is fetched from the Hub (best effort).

    Returns:
        A (summary_text, config_text) tuple of display strings; on failure the
        summary slot carries the error message instead.
    """
    if not model_id or '/' not in model_id:
        return "Please provide a valid model ID in the format username/modelname", "No config loaded."

    username, modelname = model_id.split('/', 1)

    model_url = f"https://huggingface.co/{username}/{modelname}/resolve/main/model.safetensors"

    model_path = None
    try:
        # Bounded timeout so a stalled download cannot hang the UI forever.
        response = requests.get(model_url, timeout=60)
        response.raise_for_status()

        with tempfile.NamedTemporaryFile(delete=False, suffix=".safetensors") as tmp:
            tmp.write(response.content)
            model_path = tmp.name

        summary = load_model_summary(model_path)
        state_dict = load_file(model_path)
        top_layers = get_top_layers(state_dict, summary["total_params"])
        top_layers_str = "\n".join(
            f"{layer['name']}: shape={layer['shape']}, params={layer['params']:,} ({layer['percent']}%)"
            for layer in top_layers
        )

        config_data = {}
        if config_file is not None:
            cfg_path = None
            try:
                with tempfile.NamedTemporaryFile(delete=False, suffix=".json") as tmp_cfg:
                    tmp_cfg.write(config_file.read())
                    cfg_path = tmp_cfg.name
                config_data = load_config(cfg_path)
            finally:
                # Remove the temp config file even if load_config raised.
                if cfg_path is not None:
                    os.unlink(cfg_path)
        else:
            config_url = f"https://huggingface.co/{username}/{modelname}/resolve/main/config.json"
            try:
                config_response = requests.get(config_url, timeout=30)
                config_response.raise_for_status()
                config_data = json.loads(config_response.content)
            except Exception:
                # Config is optional: a missing/broken config.json is not fatal.
                config_data = {}

        config_str = "\n".join(f"{k}: {v}" for k, v in config_data.items()) if config_data else "No config loaded."

        return (
            f" Total tensors: {summary['num_tensors']}\n"
            f" Total parameters: {summary['total_params']:,}\n\n"
            f" Top Layers:\n{top_layers_str}",
            config_str
        )

    except Exception as e:
        return f"Error: {str(e)}", "No config loaded."
    finally:
        # Always reclaim the (potentially large) temp model file, including on
        # the error path — previously it leaked whenever an exception fired
        # after the download completed.
        if model_path is not None and os.path.exists(model_path):
            os.unlink(model_path)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
64
 
65
# Build and launch a simple single-function UI around inspect_model.
interface = gr.Interface(
    fn=inspect_model,
    inputs=[
        gr.Textbox(label="Model ID from HuggingFace", placeholder="username/modelname", lines=1),
        gr.File(label="Upload config.json (optional)", type="binary"),
    ],
    outputs=[
        gr.Textbox(label="Model Summary", lines=15),
        gr.Textbox(label="Config", lines=10),
    ],
    title="Model Inspector",
    description="Enter a HuggingFace model ID in the format username/modelname to analyze its structure, parameter count, and configuration.",
)
interface.launch()
 
5
  import os
6
  import requests
7
  import json
8
+ import logging
9
+ import traceback
10
+
11
+ logging.basicConfig(level=logging.INFO)
12
+ logger = logging.getLogger(__name__)
13
 
14
  def inspect_model(model_id, config_file=None):
15
+ logger.info(f"Processing model ID: {model_id}")
16
+
17
  if not model_id or '/' not in model_id:
18
  return "Please provide a valid model ID in the format username/modelname", "No config loaded."
19
 
20
  username, modelname = model_id.split('/', 1)
21
+ logger.info(f"Username: {username}, Model name: {modelname}")
22
 
23
+ model_summary = "Processing..."
24
+ config_str = "No config loaded."
25
 
26
  try:
27
+ model_filename = "model.safetensors"
28
+ if "/" in modelname:
29
+ parts = modelname.split("/")
30
+ modelname = parts[0]
31
+ if len(parts) > 1 and parts[1].strip():
32
+ model_filename = parts[1]
33
+
34
+ model_url = f"https://huggingface.co/{username}/{modelname}/resolve/main/{model_filename}"
35
+ logger.info(f"Attempting to download model from: {model_url}")
36
+
37
+ response = requests.get(model_url, stream=True)
38
  response.raise_for_status()
39
 
40
+ total_size = int(response.headers.get('content-length', 0))
41
+ logger.info(f"Model file size: {total_size/1024/1024:.2f} MB")
42
+
43
  with tempfile.NamedTemporaryFile(delete=False, suffix=".safetensors") as tmp:
44
+ if total_size > 0:
45
+ downloaded = 0
46
+ for chunk in response.iter_content(chunk_size=8192):
47
+ if chunk:
48
+ tmp.write(chunk)
49
+ downloaded += len(chunk)
50
+ if downloaded % (100 * 1024 * 1024) == 0:
51
+ logger.info(f"Downloaded {downloaded/1024/1024:.2f} MB / {total_size/1024/1024:.2f} MB")
52
+ else:
53
+ tmp.write(response.content)
54
+
55
  model_path = tmp.name
56
+ logger.info(f"Model downloaded to temporary file: {model_path}")
57
 
58
+ logger.info("Loading model summary...")
59
  summary = load_model_summary(model_path)
60
+ logger.info(f"Loading state dictionary... (This may take time for large models)")
61
  state_dict = load_file(model_path)
62
+ logger.info("Analyzing top layers...")
63
  top_layers = get_top_layers(state_dict, summary["total_params"])
64
  top_layers_str = "\n".join([
65
  f"{layer['name']}: shape={layer['shape']}, params={layer['params']:,} ({layer['percent']}%)"
66
  for layer in top_layers
67
  ])
68
 
 
69
  config_data = {}
70
  if config_file is not None:
71
+ logger.info("Processing uploaded config file")
72
  with tempfile.NamedTemporaryFile(delete=False, suffix=".json") as tmp_cfg:
73
  tmp_cfg.write(config_file.read())
74
+ config_path = tmp_cfg.name
75
+
76
+ logger.info(f"Loading config from uploaded file: {config_path}")
77
+ config_data = load_config(config_path)
78
+ os.unlink(config_path)
79
  else:
80
  config_url = f"https://huggingface.co/{username}/{modelname}/resolve/main/config.json"
81
+ logger.info(f"Attempting to download config from: {config_url}")
82
  try:
83
  config_response = requests.get(config_url)
84
  config_response.raise_for_status()
85
  config_data = json.loads(config_response.content)
86
+ logger.info("Config file downloaded and parsed successfully")
87
  except Exception as e:
88
+ logger.warning(f"Could not download or parse config file: {str(e)}")
89
 
90
  config_str = "\n".join([f"{k}: {v}" for k, v in config_data.items()]) if config_data else "No config loaded."
91
 
92
+ # Clean up temporary file
93
+ logger.info(f"Cleaning up temporary file: {model_path}")
94
  os.unlink(model_path)
95
 
96
+ model_summary = (
97
  f" Total tensors: {summary['num_tensors']}\n"
98
  f" Total parameters: {summary['total_params']:,}\n\n"
99
+ f" Top Layers:\n{top_layers_str}"
 
100
  )
101
+ logger.info("Model inspection completed successfully")
102
+
103
+ return model_summary, config_str
104
 
105
  except Exception as e:
106
+ error_msg = f"Error: {str(e)}\n\nTraceback:\n{traceback.format_exc()}"
107
+ logger.error(error_msg)
108
+ return error_msg, "No config loaded."
109
+
110
# --- Gradio UI: two-column layout with a streaming status line -------------
with gr.Blocks(title="Model Inspector") as demo:
    gr.Markdown("# Model Inspector")
    gr.Markdown("Enter a HuggingFace model ID in the format username/modelname to analyze its structure, parameter count, and configuration.")
    gr.Markdown("You can specify a custom safetensors file by using username/modelname/filename.safetensors")

    with gr.Row():
        # Left column: inputs, trigger button, and status line.
        with gr.Column():
            model_id = gr.Textbox(label="Model ID from HuggingFace", placeholder="username/modelname", lines=1)
            config_file = gr.File(label="Upload config.json (optional)", type="binary")
            submit_btn = gr.Button("Analyze Model", variant="primary")
            status = gr.Markdown("Ready. Enter a model ID and click 'Analyze Model'")

        # Right column: analysis results.
        with gr.Column():
            model_summary = gr.Textbox(label="Model Summary", lines=15)
            config_output = gr.Textbox(label="Config", lines=10)

    def on_submit(model_id, config_file):
        # Generator handler: the first yield shows progress immediately, the
        # second delivers the final result (or the error).
        yield "Processing... This may take some time for large models.", None, None
        try:
            summary, config = inspect_model(model_id, config_file)
            yield "Analysis complete!", summary, config
        except Exception as e:
            error_msg = f"Error during analysis: {str(e)}"
            yield f"❌ {error_msg}", error_msg, "No config loaded."

    submit_btn.click(
        fn=on_submit,
        inputs=[model_id, config_file],
        outputs=[status, model_summary, config_output],
        show_progress=True,
    )

demo.launch()