Update app.py
app.py CHANGED
@@ -1,8 +1,4 @@
-
-
-import io
-import numpy as np
-import gradio as gr
+import io, numpy as np, gradio as gr
 from transformers import AutoTokenizer, AutoModelForCausalLM
 from wfgy_sdk import get_engine
 from wfgy_sdk.evaluator import compare_logits, plot_histogram
@@ -12,52 +8,36 @@ tok = AutoTokenizer.from_pretrained(MODEL_ID)
 mdl = AutoModelForCausalLM.from_pretrained(MODEL_ID)
 eng = get_engine()

-
 def run(prompt: str):
     prompt = prompt.strip()
     if not prompt:
         return "", "", "no prompt – nothing to show", None
-
     ids = tok(prompt, return_tensors="pt").input_ids
     logits_raw = mdl(ids).logits[0, -1].detach().cpu().numpy()
-
-    # toy fingerprints
     G = np.random.randn(256).astype(np.float32)
     I = G + np.random.normal(scale=0.05, size=256).astype(np.float32)
-
     logits_mod = eng.run(I, G, logits_raw)
     m = compare_logits(logits_raw, logits_mod)
-
     headline = f"▼ var {m['var_drop']*100:4.1f} % | KL {m['kl']:.3f}"
-
     fig = plot_histogram(logits_raw, logits_mod)
-    buf = io.BytesIO()
-    fig.savefig(buf, format="png")
-    buf.seek(0)
-
+    buf = io.BytesIO(); fig.savefig(buf, format="png"); buf.seek(0)
     raw_txt = prompt + tok.decode(int(logits_raw.argmax()))
     mod_txt = prompt + tok.decode(int(logits_mod.argmax()))
     return raw_txt, mod_txt, headline, buf

-
 with gr.Blocks(title="WFGY variance gate") as demo:
     gr.Markdown(
         "# 🧠 WFGY simulation demo \n"
-        "Type any prompt and watch the logit variance collapse
+        "Type any prompt and watch the logit variance collapse."
     )
-
     prompt = gr.Textbox(label="Prompt", value="Explain Schrödinger's cat")
     btn = gr.Button("🚀 Run")
-
     with gr.Row():
         raw_box = gr.Textbox(label="Raw GPT-2")
         mod_box = gr.Textbox(label="After WFGY")
-
     headline = gr.Markdown()
     img = gr.Image(label="Logit histogram")
-
     btn.click(run, prompt, [raw_box, mod_box, headline, img])
-
     gr.Markdown(
         "---\n"
         "### ⭐ Help unlock **WFGY 2.0** \n"
@@ -65,5 +45,4 @@ with gr.Blocks(title="WFGY variance gate") as demo:
     )

 if __name__ == "__main__":
-    # Gradio ≥4.31: queue() has no arg; use default queue size (=2)
     demo.queue().launch()
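
Assuming the rest of app.py is unchanged, the updated run() helper can be exercised directly, without the Gradio UI. The sketch below is an assumption rather than part of this commit: it presumes wfgy_sdk and its dependencies are installed, the checkpoint behind MODEL_ID can be downloaded, and the script sits next to app.py.

# Minimal local smoke test for the updated run() helper (a sketch, not part
# of the commit). Importing app builds the Blocks UI but does not launch it,
# because launch() sits behind the __main__ guard.
from app import run

raw_txt, mod_txt, headline, img_buf = run("Explain Schrödinger's cat")

print(headline)   # variance-drop / KL summary string
print(raw_txt)    # prompt + next token from the raw GPT-2 logits
print(mod_txt)    # prompt + next token after the WFGY gate

# img_buf is a PNG-encoded io.BytesIO produced by fig.savefig(); dump it to
# disk to inspect the logit histogram.
with open("histogram.png", "wb") as fh:
    fh.write(img_buf.getvalue())

# Untested fallback, also an assumption: if the running Gradio build rejects
# a raw BytesIO as a gr.Image value, decoding it first would be one option,
# e.g.  from PIL import Image; pil_img = Image.open(img_buf)

Because the new one-liner already calls buf.seek(0), either getvalue() or read() works on the returned buffer.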