Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
@@ -1,4 +1,6 @@
|
|
1 |
-
import io, traceback, numpy as np, gradio as gr
|
|
|
|
|
2 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
3 |
from wfgy_sdk import get_engine
|
4 |
from wfgy_sdk.evaluator import compare_logits, plot_histogram
|
@@ -8,30 +10,27 @@ tok = AutoTokenizer.from_pretrained(MODEL)
|
|
8 |
mdl = AutoModelForCausalLM.from_pretrained(MODEL)
|
9 |
eng = get_engine()
|
10 |
|
11 |
-
|
12 |
def run(prompt: str):
|
13 |
prompt = prompt.strip()
|
14 |
if not prompt:
|
15 |
return "", "", "no prompt – nothing to show", None
|
16 |
try:
|
17 |
ids = tok(prompt, return_tensors="pt").input_ids
|
18 |
-
|
19 |
G = np.random.randn(256).astype(np.float32)
|
20 |
I = G + np.random.normal(scale=0.05, size=256).astype(np.float32)
|
21 |
-
|
22 |
-
m = compare_logits(
|
23 |
-
headline = f"▼ var {m['var_drop']*100:4.1f}
|
24 |
-
fig = plot_histogram(
|
25 |
buf = io.BytesIO(); fig.savefig(buf, format="png"); buf.seek(0)
|
26 |
-
raw_txt = prompt + tok.decode(int(
|
27 |
-
mod_txt = prompt + tok.decode(int(
|
28 |
return raw_txt, mod_txt, headline, buf
|
29 |
-
except Exception:
|
30 |
-
# fallback: show traceback as text, return blanks for image
|
31 |
tb = traceback.format_exc()
|
32 |
return "runtime error", tb, "runtime error", None
|
33 |
|
34 |
-
|
35 |
with gr.Blocks(title="WFGY variance gate") as demo:
|
36 |
gr.Markdown("# 🧠 WFGY simulation demo")
|
37 |
prompt = gr.Textbox(label="Prompt", value="Explain Schrödinger's cat")
|
@@ -40,16 +39,12 @@ with gr.Blocks(title="WFGY variance gate") as demo:
|
|
40 |
with gr.Row():
|
41 |
raw_box = gr.Textbox(label="Raw GPT-2")
|
42 |
mod_box = gr.Textbox(label="After WFGY")
|
43 |
-
|
44 |
headline = gr.Markdown()
|
45 |
img = gr.Image(label="Logit histogram")
|
46 |
|
47 |
btn.click(run, prompt, [raw_box, mod_box, headline, img])
|
48 |
|
49 |
-
gr.Markdown(
|
50 |
-
"---\n"
|
51 |
-
"### ⭐ Help unlock **WFGY 2.0** — 10 000 stars by **2025-08-01**"
|
52 |
-
)
|
53 |
|
54 |
if __name__ == "__main__":
|
55 |
demo.queue().launch()
|
|
|
1 |
+
import io, traceback, numpy as np, gradio as gr, matplotlib
|
2 |
+
matplotlib.use("Agg") # headless backend
|
3 |
+
|
4 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
5 |
from wfgy_sdk import get_engine
|
6 |
from wfgy_sdk.evaluator import compare_logits, plot_histogram
|
|
|
10 |
mdl = AutoModelForCausalLM.from_pretrained(MODEL)
|
11 |
eng = get_engine()
|
12 |
|
|
|
13 |
def run(prompt: str):
    """Run the WFGY variance-gate demo pipeline on *prompt*.

    Returns a 4-tuple matching the Gradio outputs:
    (raw greedy continuation, WFGY-modulated continuation,
    headline markdown string, PNG buffer of the logit histogram).
    On empty input or runtime failure, placeholder text is returned
    and the image slot is ``None``.
    """
    import matplotlib.pyplot as plt  # local: file imports only `matplotlib`

    prompt = prompt.strip()
    if not prompt:
        # Nothing to evaluate: blank texts, a notice, no image.
        return "", "", "no prompt – nothing to show", None
    try:
        ids = tok(prompt, return_tensors="pt").input_ids
        # Last-position logits from the raw model, as a numpy vector.
        raw = mdl(ids).logits[0, -1].detach().cpu().numpy()
        # Synthetic G/I vectors fed to the engine alongside the logits.
        # NOTE(review): unseeded RNG makes every call non-deterministic —
        # seed here if reproducible demo output is wanted.
        G = np.random.randn(256).astype(np.float32)
        I = G + np.random.normal(scale=0.05, size=256).astype(np.float32)
        mod = eng.run(I, G, raw)
        m = compare_logits(raw, mod)
        headline = f"▼ var {m['var_drop']*100:4.1f}% | KL {m['kl']:.3f}"
        fig = plot_histogram(raw, mod)
        buf = io.BytesIO()
        fig.savefig(buf, format="png")
        buf.seek(0)
        # Close the figure so repeated clicks don't leak matplotlib state.
        plt.close(fig)
        # Greedy next-token continuation before/after WFGY modulation.
        raw_txt = prompt + tok.decode(int(raw.argmax()))
        mod_txt = prompt + tok.decode(int(mod.argmax()))
        return raw_txt, mod_txt, headline, buf
    except Exception:  # was `as e` with `e` unused — surface traceback in UI
        tb = traceback.format_exc()
        return "runtime error", tb, "runtime error", None
|
33 |
|
|
|
34 |
with gr.Blocks(title="WFGY variance gate") as demo:
|
35 |
gr.Markdown("# 🧠 WFGY simulation demo")
|
36 |
prompt = gr.Textbox(label="Prompt", value="Explain Schrödinger's cat")
|
|
|
39 |
with gr.Row():
|
40 |
raw_box = gr.Textbox(label="Raw GPT-2")
|
41 |
mod_box = gr.Textbox(label="After WFGY")
|
|
|
42 |
headline = gr.Markdown()
|
43 |
img = gr.Image(label="Logit histogram")
|
44 |
|
45 |
btn.click(run, prompt, [raw_box, mod_box, headline, img])
|
46 |
|
47 |
+
gr.Markdown("---\n### ⭐ 10 000 stars → unlock **WFGY 2.0** by 2025-08-01")
|
|
|
|
|
|
|
48 |
|
49 |
# Script entry point: enable Gradio's request queue, then serve the UI.
if __name__ == "__main__":
    demo.queue().launch()
|