OneStarDao committed on
Commit
7429b83
·
verified ·
1 Parent(s): b4b22d4

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +29 -22
app.py CHANGED
@@ -1,47 +1,54 @@
1
import io, numpy as np, gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM
from wfgy_sdk import get_engine
from wfgy_sdk.evaluator import compare_logits, plot_histogram

# Tiny GPT-2 checkpoint: loads in seconds, adequate for a logits-only demo.
MODEL_ID = "sshleifer/tiny-gpt2"
tok = AutoTokenizer.from_pretrained(MODEL_ID)
mdl = AutoModelForCausalLM.from_pretrained(MODEL_ID)
eng = get_engine()  # WFGY engine instance shared by all requests
11
def run(prompt: str):
    """Run raw GPT-2 and the WFGY-gated variant on *prompt*.

    Returns a 4-tuple matching the Gradio outputs:
    (raw_text, modulated_text, headline_markdown, png_buffer_or_None).
    """
    prompt = prompt.strip()
    if not prompt:
        return "", "", "no prompt – nothing to show", None

    # Next-token logits from the raw model.
    token_ids = tok(prompt, return_tensors="pt").input_ids
    raw_logits = mdl(token_ids).logits[0, -1].detach().cpu().numpy()

    # Synthetic ground-truth / noisy-estimate pair fed to the WFGY gate.
    ground = np.random.randn(256).astype(np.float32)
    noisy = ground + np.random.normal(scale=0.05, size=256).astype(np.float32)
    mod_logits = eng.run(noisy, ground, raw_logits)

    metrics = compare_logits(raw_logits, mod_logits)
    headline = f"▼ var {metrics['var_drop']*100:4.1f} % | KL {metrics['kl']:.3f}"

    # Render the comparison histogram into an in-memory PNG.
    fig = plot_histogram(raw_logits, mod_logits)
    buf = io.BytesIO()
    fig.savefig(buf, format="png")
    buf.seek(0)

    # Greedy one-token continuation under each logit distribution.
    greedy_raw = tok.decode(int(raw_logits.argmax()))
    greedy_mod = tok.decode(int(mod_logits.argmax()))
    return prompt + greedy_raw, prompt + greedy_mod, headline, buf
 
 
 
 
 
 
27
 
28
# --- Gradio UI ---------------------------------------------------------------
with gr.Blocks(title="WFGY variance gate") as demo:
    gr.Markdown(
        "# 🧠 WFGY simulation demo \n"
        "Type any prompt and watch the logit variance collapse."
    )
    # Input controls.
    prompt = gr.Textbox(label="Prompt", value="Explain Schrödinger's cat")
    btn = gr.Button("🚀 Run")

    # Raw vs WFGY-modulated continuations, side by side.
    with gr.Row():
        raw_box = gr.Textbox(label="Raw GPT-2")
        mod_box = gr.Textbox(label="After WFGY")

    headline = gr.Markdown()  # variance-drop / KL summary line
    img = gr.Image(label="Logit histogram")

    # Outputs map positionally to run()'s 4-tuple return value.
    btn.click(run, prompt, [raw_box, mod_box, headline, img])

    gr.Markdown(
        "---\n"
        "### ⭐ Help unlock **WFGY 2.0** \n"
        "10 000 GitHub stars by **2025-08-01** → next-gen release."
    )
46
 
47
  if __name__ == "__main__":
 
1
import io, traceback, numpy as np, gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM
from wfgy_sdk import get_engine
from wfgy_sdk.evaluator import compare_logits, plot_histogram

# Tiny GPT-2 checkpoint: loads in seconds, adequate for a logits-only demo.
MODEL = "sshleifer/tiny-gpt2"
tok = AutoTokenizer.from_pretrained(MODEL)
mdl = AutoModelForCausalLM.from_pretrained(MODEL)
eng = get_engine()  # WFGY engine instance shared by all requests
12
def run(prompt: str):
    """Compare raw GPT-2 next-token logits with the WFGY-modulated ones.

    Returns a 4-tuple matching the Gradio outputs:
    (raw_text, modulated_text, headline_markdown, png_buffer_or_None).
    Any runtime failure is rendered as a traceback in the UI instead of
    crashing the Space.
    """
    prompt = prompt.strip()
    if not prompt:
        return "", "", "no prompt – nothing to show", None
    try:
        ids = tok(prompt, return_tensors="pt").input_ids
        rawL = mdl(ids).logits[0, -1].detach().cpu().numpy()
        # Synthetic ground-truth / noisy-estimate pair fed to the WFGY gate.
        G = np.random.randn(256).astype(np.float32)
        I = G + np.random.normal(scale=0.05, size=256).astype(np.float32)
        modL = eng.run(I, G, rawL)
        m = compare_logits(rawL, modL)
        headline = f"▼ var {m['var_drop']*100:4.1f} % | KL {m['kl']:.3f}"
        fig = plot_histogram(rawL, modL)
        buf = io.BytesIO()
        fig.savefig(buf, format="png")
        buf.seek(0)
        # Fix: close the figure after saving so repeated clicks in a
        # long-running Space don't leak matplotlib figure memory.
        import matplotlib.pyplot as plt
        plt.close(fig)
        # NOTE(review): gr.Image may not accept a raw BytesIO in newer
        # Gradio versions — confirm, or convert to a PIL.Image/numpy array.
        raw_txt = prompt + tok.decode(int(rawL.argmax()))
        mod_txt = prompt + tok.decode(int(modL.argmax()))
        return raw_txt, mod_txt, headline, buf
    except Exception:
        # UI boundary: surface the traceback in the output box rather than
        # letting the request 500. Broad catch is deliberate here.
        tb = traceback.format_exc()
        return "runtime error", tb, "runtime error", None
34
 
35
# --- Gradio UI ---------------------------------------------------------------
with gr.Blocks(title="WFGY variance gate") as demo:
    gr.Markdown("# 🧠 WFGY simulation demo")

    # Input controls.
    prompt = gr.Textbox(label="Prompt", value="Explain Schrödinger's cat")
    btn = gr.Button("🚀 Run")

    # Raw vs WFGY-modulated continuations, side by side.
    with gr.Row():
        raw_box = gr.Textbox(label="Raw GPT-2")
        mod_box = gr.Textbox(label="After WFGY")

    headline = gr.Markdown()  # variance-drop / KL summary line
    img = gr.Image(label="Logit histogram")

    # Outputs map positionally to run()'s 4-tuple return value.
    btn.click(run, prompt, [raw_box, mod_box, headline, img])

    gr.Markdown(
        "---\n"
        "### ⭐ Help unlock **WFGY 2.0** — 10 000 stars by **2025-08-01**"
    )
53
 
54
  if __name__ == "__main__":