Malum0x committed
Commit b7610c9 · 1 Parent(s): 5c53063

fix: hf adjustments

Files changed (1): app.py (+57, -29)
app.py CHANGED
@@ -3,41 +3,69 @@ from safetensors.torch import load_file
 from model_loader import get_top_layers, load_model_summary, load_config
 import tempfile
 import os
+import requests
+import json
 
-def inspect_model(model_file, config_file=None):
-    with tempfile.NamedTemporaryFile(delete=False, suffix=".safetensors") as tmp:
-        tmp.write(model_file.read())
-        model_path = tmp.name
+def inspect_model(model_id, config_file=None):
+    if not model_id or '/' not in model_id:
+        return "Please provide a valid model ID in the format username/modelname", "No config loaded."
+
+    username, modelname = model_id.split('/', 1)
+
+    model_url = f"https://huggingface.co/{username}/{modelname}/resolve/main/model.safetensors"
+
+    try:
+        response = requests.get(model_url)
+        response.raise_for_status()
+
 
-    summary = load_model_summary(model_path)
-    state_dict = load_file(model_path)
-    top_layers = get_top_layers(state_dict, summary["total_params"])
+        with tempfile.NamedTemporaryFile(delete=False, suffix=".safetensors") as tmp:
+            tmp.write(response.content)
+            model_path = tmp.name
+
 
-    top_layers_str = "\n".join([
-        f"{layer['name']}: shape={layer['shape']}, params={layer['params']:,} ({layer['percent']}%)"
-        for layer in top_layers
-    ])
+        summary = load_model_summary(model_path)
+        state_dict = load_file(model_path)
+        top_layers = get_top_layers(state_dict, summary["total_params"])
+        top_layers_str = "\n".join([
+            f"{layer['name']}: shape={layer['shape']}, params={layer['params']:,} ({layer['percent']}%)"
+            for layer in top_layers
+        ])
+
 
-
-    config_data = {}
-    if config_file is not None:
-        with tempfile.NamedTemporaryFile(delete=False, suffix=".json") as tmp_cfg:
-            tmp_cfg.write(config_file.read())
-        config_data = load_config(tmp_cfg.name)
-
-    config_str = "\n".join([f"{k}: {v}" for k, v in config_data.items()]) if config_data else "No config loaded."
-
-    return (
-        f" Total tensors: {summary['num_tensors']}\n"
-        f" Total parameters: {summary['total_params']:,}\n\n"
-        f" Top Layers:\n{top_layers_str}",
-        config_str
-    )
+        config_data = {}
+        if config_file is not None:
+            with tempfile.NamedTemporaryFile(delete=False, suffix=".json") as tmp_cfg:
+                tmp_cfg.write(config_file.read())
+            config_data = load_config(tmp_cfg.name)
+            os.unlink(tmp_cfg.name)
+        else:
+            config_url = f"https://huggingface.co/{username}/{modelname}/resolve/main/config.json"
+            try:
+                config_response = requests.get(config_url)
+                config_response.raise_for_status()
+                config_data = json.loads(config_response.content)
+            except Exception as e:
+                pass
+
+        config_str = "\n".join([f"{k}: {v}" for k, v in config_data.items()]) if config_data else "No config loaded."
+
+        os.unlink(model_path)
+
+        return (
+            f" Total tensors: {summary['num_tensors']}\n"
+            f" Total parameters: {summary['total_params']:,}\n\n"
+            f" Top Layers:\n{top_layers_str}",
+            config_str
+        )
+
+    except Exception as e:
+        return f"Error: {str(e)}", "No config loaded."
 
 gr.Interface(
     fn=inspect_model,
     inputs=[
-        gr.File(label="Upload model (.safetensors)", type="binary"),
+        gr.Textbox(label="Model ID from HuggingFace", placeholder="username/modelname", lines=1),
         gr.File(label="Upload config.json (optional)", type="binary")
     ],
     outputs=[
@@ -45,5 +73,5 @@ gr.Interface(
         gr.Textbox(label="Config", lines=10)
     ],
     title="Model Inspector",
-    description="Upload a .safetensors file to analyze its structure, parameter count, and optionally its config.json"
-).launch()
+    description="Enter a HuggingFace model ID in the format username/modelname to analyze its structure, parameter count, and configuration."
+).launch()
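
For reference, the download-and-inspect path introduced by this commit can be exercised outside the Gradio UI with a minimal sketch along these lines. It mirrors the committed logic (fetch model.safetensors from the resolve/main URL, write it to a temp file, load it with safetensors.torch.load_file) and assumes the target repo actually hosts a model.safetensors file at the root of its main branch; "username/modelname" is a placeholder, not a real repo id.

# Standalone sketch of the new fetch path (not part of the commit).
import os
import tempfile

import requests
from safetensors.torch import load_file

model_id = "username/modelname"  # placeholder repo id
model_url = f"https://huggingface.co/{model_id}/resolve/main/model.safetensors"

response = requests.get(model_url)
response.raise_for_status()

# Write the downloaded bytes to a temp file, same as the Space does.
with tempfile.NamedTemporaryFile(delete=False, suffix=".safetensors") as tmp:
    tmp.write(response.content)
    model_path = tmp.name

state_dict = load_file(model_path)  # tensor name -> torch.Tensor
total_params = sum(t.numel() for t in state_dict.values())
print(f"{len(state_dict)} tensors, {total_params:,} parameters")

os.unlink(model_path)  # clean up the temp file, mirroring the os.unlink added here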