"""
WFGY Space – tiny-GPT-2 variance-gate demo
★ 10 k GitHub ⭐ before 2025-08-01 unlocks WFGY 2.0 ★
"""

import io
import numpy as np
import pandas as pd
import gradio as gr
from PIL import Image
from transformers import AutoTokenizer, AutoModelForCausalLM

from wfgy_sdk import get_engine
from wfgy_sdk.evaluator import compare_logits, plot_histogram

# tiny model (CPU)
tok = AutoTokenizer.from_pretrained("sshleifer/tiny-gpt2")
mdl = AutoModelForCausalLM.from_pretrained("sshleifer/tiny-gpt2")
eng = get_engine()

# paper benchmarks
bench = pd.DataFrame({
    "Benchmark": ["MMLU","GSM8K","BBH","MathBench","TruthfulQA",
                  "XNLI","MLQA","LongBench","VQAv2","OK-VQA"],
    "Baseline":  [61,78,79.3,72.2,62.4,59.5,78.1,51.4,69.1,65.7],
    "WFGY":      [89.8,98.7,100.7,87.4,90.4,77.3,106.6,69.6,86.6,86.8]
})
bench["Abs_gain"]  = (bench["WFGY"] - bench["Baseline"]).round(1)
bench["Rel_gain%"] = ((bench["Abs_gain"] / bench["Baseline"]) * 100).round(0)
bench_sty = (
    bench.style
    .background_gradient(cmap="Greens", subset=["Abs_gain","Rel_gain%"])
    .format({"Abs_gain":"{:.1f}","Rel_gain%":"{:.0f}"})
)

# banner markdown
banner = """
**📈 WFGY: One Click to Activate the AI Taiji Cycle**

**📊 Semantic Accuracy ↑ 22.4 % | Reasoning Success ↑ 42.1 % | Stability ↑ 3.6 ×**

---

### 📜 Tutorial: How to Awaken the Soul of Your AI
**Step 1 – Download** ([WFGY PDF on Zenodo](https://doi.org/10.5281/zenodo.15630969))  
**Step 2 – Feed the AI** (upload, or try [ChatGPT](https://chatgpt.com/))  
**Step 3 – Give the Command** (“Answer using WFGY” + your question) ([Prompt Revolution PDF on Zenodo](https://doi.org/10.5281/zenodo.15657016))  
**Step 4 – Integrate the SDK** ([GitHub](https://github.com/onestardao/WFGY))

---

🌟 **Star Reminder** → [Star the repo](https://github.com/onestardao/WFGY)  
_10 k ⭐ before 2025-08-01 unlocks WFGY 2.0._
"""

# numerically stable softmax (shift by the max before exponentiating)
def softmax_np(logits: np.ndarray) -> np.ndarray:
    z = logits - np.max(logits)
    e = np.exp(z)
    return e / np.sum(e)
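# Illustrative sanity check: softmax_np(np.array([1.0, 2.0, 3.0]))
# ≈ [0.0900, 0.2447, 0.6652]; the probabilities always sum to 1.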

# inference
def run(prompt: str):
    p = prompt.strip()
    if not p:
        return "", "", "-", None

    ids   = tok(p, return_tensors="pt")
    raw_L = mdl(**ids).logits[0, -1].detach().cpu().numpy()
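    # I and G are random 256-dim vectors, presumably the "input"/"ground"
    # semantic embeddings eng.run expects; in this demo they are just
    # placeholders, not real embeddings.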
    I, G  = np.random.randn(2, 256).astype(np.float32)
    mod_L = eng.run(I, G, raw_L)

    m = compare_logits(raw_L, mod_L)
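    # compare_logits returns a metrics dict; judging by the keys used below,
    # it reports the variance drop, the KL divergence between the two
    # distributions, and whether the top-1 token was kept.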
    header = "▼ var {:.1f}% | KL {:.3f} | top-1 {}".format(
        m["var_drop"]*100, m["kl_divergence"],
        "kept" if m["top1"] else "changed"
    )

    def top5(logits):
        p_arr = softmax_np(logits)
        idx = np.argsort(p_arr)[-5:][::-1]
        lines = []
        for i in idx:
            token = tok.decode(int(i)).strip()
            prob = p_arr[i]
            lines.append("'{}': {:.2e}".format(token, prob))
        return "\n".join(lines)

    raw_txt = top5(raw_L)
    mod_txt = top5(mod_L)

    fig = plot_histogram(raw_L, mod_L)
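    # Serialize the figure to an in-memory PNG and reopen it as a PIL image,
    # which is what the gr.Image output component displays.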
    buf = io.BytesIO()
    fig.savefig(buf, format="png")
    buf.seek(0)

    return raw_txt, mod_txt, header, Image.open(buf)

# UI
with gr.Blocks(title="WFGY variance-gate demo") as demo:
    gr.Markdown(banner)

    prompt = gr.Textbox(label="Prompt", value="Explain Schrödinger's cat")
    btn    = gr.Button("🚀 Run")

    with gr.Row():
        raw_box = gr.Textbox(label="Raw top-5 tokens", lines=6)
        mod_box = gr.Textbox(label="WFGY top-5 tokens", lines=6)

    metrics = gr.Markdown()
    img     = gr.Image(label="Logit histogram")

    gr.Markdown("### Paper benchmarks (fixed values from WFGY 1.0)")
    gr.DataFrame(bench_sty, interactive=False, wrap=True)

    btn.click(run, prompt, [raw_box, mod_box, metrics, img])

if __name__ == "__main__":
    demo.queue(default_concurrency_limit=2).launch()