Remove migration utils #31
opened by levmckinney
This view is limited to 50 files because it contains too many changes.
See the raw diff here.
- .dockerignore +0 -2
- .gitattributes +0 -1
- Dockerfile +0 -25
- README.md +4 -1
- __pycache__/app.cpython-310.pyc +0 -0
- app.py +19 -24
- lens/CarperAI/stable-vicuna-13b/config.json +0 -1
- lens/CarperAI/stable-vicuna-13b/params.pt +0 -3
- lens/EleutherAI/pythia-1.4b-deduped/config.json +0 -1
- lens/EleutherAI/pythia-1.4b-deduped/params.pt +0 -3
- lens/EleutherAI/pythia-12b-deduped/config.json +0 -1
- lens/EleutherAI/pythia-12b-deduped/params.pt +0 -3
- lens/EleutherAI/pythia-160m-deduped/config.json +0 -1
- lens/EleutherAI/pythia-160m-deduped/params.pt +0 -3
- lens/EleutherAI/pythia-2.8b-deduped-v0/config.json +0 -1
- lens/EleutherAI/pythia-2.8b-deduped-v0/params.pt +0 -3
- lens/EleutherAI/pythia-2.8b-deduped/config.json +0 -1
- lens/EleutherAI/pythia-2.8b-deduped/params.pt +0 -3
- lens/EleutherAI/pythia-410m-deduped/config.json +0 -1
- lens/EleutherAI/pythia-410m-deduped/params.pt +0 -3
- lens/EleutherAI/pythia-6.9b-deduped/config.json +0 -1
- lens/EleutherAI/pythia-6.9b-deduped/params.pt +0 -3
- lens/EleutherAI/pythia-70m-deduped/config.json +0 -1
- lens/EleutherAI/pythia-70m-deduped/params.pt +0 -3
- lens/facebook/llama-13b/config.json +0 -1
- lens/facebook/llama-13b/params.pt +0 -3
- lens/facebook/llama-30b/config.json +0 -1
- lens/facebook/llama-30b/params.pt +0 -3
- lens/facebook/llama-65b/config.json +0 -1
- lens/facebook/llama-65b/params.pt +0 -3
- lens/facebook/llama-7b/config.json +0 -1
- lens/facebook/llama-7b/params.pt +0 -3
- lens/{EleutherAI/gpt-neox-20b → gpt-neox-20b}/config.json +1 -1
- lens/{EleutherAI/gpt-neox-20b → gpt-neox-20b}/params.pt +0 -0
- lens/gpt2-large/config.json +1 -1
- lens/gpt2-xl/config.json +1 -1
- lens/gpt2/config.json +1 -1
- lens/lmsys/vicuna-13b-v1.1/config.json +0 -1
- lens/lmsys/vicuna-13b-v1.1/params.pt +0 -3
- lens/meta-llama/Llama-2-13b-chat-hf/config.json +0 -1
- lens/meta-llama/Llama-2-13b-chat-hf/hh-rlhf/config.json +0 -1
- lens/meta-llama/Llama-2-13b-chat-hf/hh-rlhf/params.pt +0 -3
- lens/meta-llama/Llama-2-13b-chat-hf/params.pt +0 -3
- lens/meta-llama/Llama-2-13b-hf/config.json +0 -1
- lens/meta-llama/Llama-2-13b-hf/params.pt +0 -3
- lens/meta-llama/Llama-2-7b-chat-hf/config.json +0 -1
- lens/meta-llama/Llama-2-7b-chat-hf/hh-rlhf/config.json +0 -1
- lens/meta-llama/Llama-2-7b-chat-hf/hh-rlhf/params.pt +0 -3
- lens/meta-llama/Llama-2-7b-chat-hf/params.pt +0 -3
- lens/meta-llama/Llama-2-7b-hf/config.json +0 -1
.dockerignore
DELETED
@@ -1,2 +0,0 @@
-lens
-.git
.gitattributes
CHANGED
@@ -32,4 +32,3 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
-*.pyc filter=lfs diff=lfs merge=lfs -text
Dockerfile
DELETED
@@ -1,25 +0,0 @@
-FROM python:3.9
-
-WORKDIR /code
-
-COPY ./requirements.txt /code/requirements.txt
-
-RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
-
-# Set up a new user named "user" with user ID 1000
-RUN useradd -m -u 1000 user
-
-# Switch to the "user" user
-USER user
-
-# Set home to the user's home directory
-ENV HOME=/home/user \
-    PATH=/home/user/.local/bin:$PATH
-
-# Set the working directory to the user's home directory
-WORKDIR $HOME/app
-
-# Copy the current directory contents into the container at $HOME/app setting the owner to the user
-COPY --chown=user . $HOME/app
-
-CMD ["python", "app.py"]
README.md
CHANGED
@@ -3,7 +3,10 @@ title: Tuned Lens
 emoji: 🔎
 colorFrom: pink
 colorTo: blue
-sdk:
+sdk: gradio
+python_version: 3.10.2
+sdk_version: 3.20.0
+app_file: app.py
 pinned: false
 license: mit
 ---
__pycache__/app.cpython-310.pyc
ADDED
Binary file (3.92 kB).
app.py
CHANGED
@@ -1,20 +1,17 @@
 import torch
 from tuned_lens.nn.lenses import TunedLens, LogitLens
 from transformers import AutoModelForCausalLM, AutoTokenizer
-from tuned_lens.plotting import
+from tuned_lens.plotting import plot_lens
 import gradio as gr
 from plotly import graph_objects as go

 device = torch.device("cpu")
 print(f"Using device {device} for inference")
-model = AutoModelForCausalLM.from_pretrained("EleutherAI/pythia-410m-deduped")
+model = AutoModelForCausalLM.from_pretrained("EleutherAI/pythia-410m-deduped-v0")
 model = model.to(device)
-tokenizer = AutoTokenizer.from_pretrained("EleutherAI/pythia-410m-deduped")
-tuned_lens = TunedLens.
-
-    map_location=device,
-)
-logit_lens = LogitLens.from_model(model)
+tokenizer = AutoTokenizer.from_pretrained("EleutherAI/pythia-410m-deduped-v0")
+tuned_lens = TunedLens.load("pythia-410m-deduped-v0", map_location=device)
+logit_lens = LogitLens(model)

 lens_options_dict = {
     "Tuned Lens": tuned_lens,
@@ -23,35 +20,32 @@ lens_options_dict = {

 statistic_options_dict = {
     "Entropy": "entropy",
-    "Cross Entropy": "
+    "Cross Entropy": "ce",
     "Forward KL": "forward_kl",
 }


 def make_plot(lens, text, statistic, token_cutoff):
-    input_ids = tokenizer.encode(text)
-    input_ids = [tokenizer.bos_token_id] + input_ids
-    targets = input_ids[1:] + [tokenizer.eos_token_id]
+    input_ids = tokenizer.encode(text, return_tensors="pt")

-    if len(input_ids) ==
+    if len(input_ids[0]) == 0:
         return go.Figure(layout=dict(title="Please enter some text."))

     if token_cutoff < 1:
         return go.Figure(layout=dict(title="Please provide valid token cut off."))

-
-
-
-
+    fig = plot_lens(
+        model,
+        tokenizer,
+        lens_options_dict[lens],
+        layer_stride=2,
         input_ids=input_ids,
-
-
-        start_pos=start_pos,
+        start_pos=max(len(input_ids[0]) - token_cutoff, 0),
+        statistic=statistic_options_dict[statistic],
     )

-    return
-
-    )
+    return fig
+

 preamble = """
 # The Tuned Lens 🔎
@@ -114,4 +108,5 @@ with gr.Blocks() as demo:
     demo.load(make_plot, [lens_options, text, statistic, token_cutoff], plot)

 if __name__ == "__main__":
-    demo.launch(
+    demo.launch()
+
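For orientation, here is a minimal, self-contained sketch of the load-then-plot flow that the updated app.py relies on. It assumes a tuned-lens release that still exposes TunedLens.load, LogitLens(model), and plot_lens; the call signatures are copied from the diff above rather than verified against any specific package version, and the prompt and token_cutoff values are illustrative.

# Minimal sketch (not part of this PR): the lens-plotting flow used by the updated app.py.
# Assumes the older tuned-lens interface shown in the diff (TunedLens.load, LogitLens(model),
# plot_lens); the prompt and token_cutoff below are illustrative.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from tuned_lens.nn.lenses import TunedLens, LogitLens
from tuned_lens.plotting import plot_lens

device = torch.device("cpu")
model = AutoModelForCausalLM.from_pretrained("EleutherAI/pythia-410m-deduped-v0").to(device)
tokenizer = AutoTokenizer.from_pretrained("EleutherAI/pythia-410m-deduped-v0")

tuned_lens = TunedLens.load("pythia-410m-deduped-v0", map_location=device)
logit_lens = LogitLens(model)  # kept alongside the tuned lens for comparison, as in the app

text = "The quick brown fox jumps over the lazy dog"
token_cutoff = 10  # plot only the last 10 tokens
input_ids = tokenizer.encode(text, return_tensors="pt")

# Same call make_plot issues, with the "Entropy" statistic and every second layer.
fig = plot_lens(
    model,
    tokenizer,
    tuned_lens,
    layer_stride=2,
    input_ids=input_ids,
    start_pos=max(len(input_ids[0]) - token_cutoff, 0),
    statistic="entropy",
)
fig.show()

In the Space itself the figure is not shown directly; make_plot returns it to a gr.Plot component wired up via demo.load.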
lens/CarperAI/stable-vicuna-13b/config.json
DELETED
@@ -1 +0,0 @@
-{"base_model_name_or_path": "vicuna-13b", "d_model": 5120, "num_hidden_layers": 40, "bias": true, "base_model_revision": null, "unembed_hash": "270c781d8280754a30abaf6fc186b1d754cc5d6bb17173d572d5517d5c7c702c", "lens_type": "linear_tuned_lens"}
lens/CarperAI/stable-vicuna-13b/params.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:47afb57632ac509a309e75da139e666ba5c7e93f2a305b2e286d7e748b7cc476
-size 2097581027
lens/EleutherAI/pythia-1.4b-deduped/config.json
DELETED
@@ -1 +0,0 @@
-{"base_model_name_or_path": "EleutherAI/pythia-1.4b-deduped", "d_model": 2048, "num_hidden_layers": 24, "bias": true, "base_model_revision": null, "lens_type": "linear_tuned_lens"}
lens/EleutherAI/pythia-1.4b-deduped/params.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:b56db530d2c0df1bc5916bae58b241cfd4389dd4b1aa29e7210395df97164824
-size 402861347
lens/EleutherAI/pythia-12b-deduped/config.json
DELETED
@@ -1 +0,0 @@
-{"base_model_name_or_path": "EleutherAI/pythia-12b-deduped", "d_model": 5120, "num_hidden_layers": 36, "bias": true, "base_model_revision": null, "lens_type": "linear_tuned_lens"}
lens/EleutherAI/pythia-12b-deduped/params.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:8452947780e3caf7009a752d62f1a2d71c66f67b3855c7e409869b7e633db40e
-size 3775628355
lens/EleutherAI/pythia-160m-deduped/config.json
DELETED
@@ -1 +0,0 @@
-{"base_model_name_or_path": "EleutherAI/pythia-160m-deduped", "d_model": 768, "num_hidden_layers": 12, "bias": true, "base_model_revision": null, "lens_type": "linear_tuned_lens"}
lens/EleutherAI/pythia-160m-deduped/params.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:1d16b68baa1eb903bbf74eaf859cee5c34361f3e8dff331b17f55b8ce52a2dbe
-size 28354051
lens/EleutherAI/pythia-2.8b-deduped-v0/config.json
DELETED
@@ -1 +0,0 @@
-{"base_model_name_or_path": "EleutherAI/pythia-2.8b-deduped-v0", "d_model": 2560, "num_hidden_layers": 32, "bias": true, "base_model_revision": null, "lens_type": "linear_tuned_lens"}
lens/EleutherAI/pythia-2.8b-deduped-v0/params.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:f7892bd4af71a436aaba3a650ea907c94a077b8f16eb343c9a8762872c73026f
-size 839204003
lens/EleutherAI/pythia-2.8b-deduped/config.json
DELETED
@@ -1 +0,0 @@
-{"base_model_name_or_path": "EleutherAI/pythia-2.8b-deduped", "d_model": 2560, "num_hidden_layers": 32, "bias": true, "base_model_revision": null, "lens_type": "linear_tuned_lens"}
lens/EleutherAI/pythia-2.8b-deduped/params.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:314d403f2e1b1bf575ab2e851419d3a687c54138bfae5feace3ff00f1a96fd60
-size 839204003
lens/EleutherAI/pythia-410m-deduped/config.json
DELETED
@@ -1 +0,0 @@
-{"base_model_name_or_path": "EleutherAI/pythia-410m-deduped", "d_model": 1024, "num_hidden_layers": 24, "bias": true, "base_model_revision": null, "lens_type": "linear_tuned_lens"}
lens/EleutherAI/pythia-410m-deduped/params.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:166ea259b35481e1eb2feba50b5ac4d9a8faed47b0937ede0d7bd6d9830dbc95
-size 100773155
lens/EleutherAI/pythia-6.9b-deduped/config.json
DELETED
@@ -1 +0,0 @@
-{"base_model_name_or_path": "EleutherAI/pythia-6.9b-deduped", "d_model": 4096, "num_hidden_layers": 32, "bias": true, "base_model_revision": null, "lens_type": "linear_tuned_lens"}
lens/EleutherAI/pythia-6.9b-deduped/params.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:e2d00af6f64631b932b089fac5ca031061e207d51132a0f7433bd7a34fb06da0
-size 2148023459
lens/EleutherAI/pythia-70m-deduped/config.json
DELETED
@@ -1 +0,0 @@
-{"base_model_name_or_path": "EleutherAI/pythia-70m-deduped", "d_model": 512, "num_hidden_layers": 6, "bias": true, "base_model_revision": null, "lens_type": "linear_tuned_lens"}
lens/EleutherAI/pythia-70m-deduped/params.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:c363b641564af68e9f9b73af56015b6b5c5b08caea5880787744eaabfab01343
-size 6306803
lens/facebook/llama-13b/config.json
DELETED
@@ -1 +0,0 @@
-{"base_model_name_or_path": "huggyllama/llama-13b", "d_model": 5120, "num_hidden_layers": 40, "bias": true, "base_model_revision": null, "unembed_hash": "86cefdfd94bb3da225b405dd1328136786f6177b03d82f90e5c734d23c47e8ca", "lens_type": "linear_tuned_lens"}
lens/facebook/llama-13b/params.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:2700859b4335f4e4bdd4040232f594170bb717f8af04f65e5560c49cfb6da122
-size 2097581027
lens/facebook/llama-30b/config.json
DELETED
@@ -1 +0,0 @@
-{"base_model_name_or_path": "huggyllama/llama-30b", "d_model": 6656, "num_hidden_layers": 60, "bias": true, "base_model_revision": null, "unembed_hash": "512b41bb8fc0e2b3647a877b5e114ca9503d7c800185b14a8e7ad9e921424367", "lens_type": "linear_tuned_lens"}
lens/facebook/llama-30b/params.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:10e781269b476a6de8f4ac12ea3f8b20fe22d3a8ca8cfdf770366a5f4e36c78f
-size 5317111487
lens/facebook/llama-65b/config.json
DELETED
@@ -1 +0,0 @@
-{"base_model_name_or_path": "huggyllama/llama-65b", "d_model": 8192, "num_hidden_layers": 80, "bias": true, "base_model_revision": null, "unembed_hash": "8ffc2dbf80dd5c131c4be9e347d090856bef2dbc973433dbd42ca9257b00d5e1", "lens_type": "linear_tuned_lens"}
lens/facebook/llama-65b/params.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:c9199874796630e1be615b0b34cd48e7f37ed09c6140153cdf6b7c509c45361b
-size 10738779199
lens/facebook/llama-7b/config.json
DELETED
@@ -1 +0,0 @@
-{"base_model_name_or_path": "huggyllama/llama-7b", "d_model": 4096, "num_hidden_layers": 32, "bias": true, "base_model_revision": null, "unembed_hash": "7d0c2d720d286bdd706e662ea04f327204090f7d54054b0d5faabbc1b06a72fe", "lens_type": "linear_tuned_lens"}
lens/facebook/llama-7b/params.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:02186a08b29e7173ae229218823b65891a7b2f2eadf0e69baf44a9c62b55a7cf
-size 1074019491
lens/{EleutherAI/gpt-neox-20b → gpt-neox-20b}/config.json
RENAMED
@@ -1 +1 @@
-{"base_model_name_or_path": "EleutherAI/gpt-neox-20b", "d_model": 6144, "num_hidden_layers": 44, "bias": true, "base_model_revision": "4e49eadb5d14bd22f314ec3f45b69a87b88c7691", "lens_type": "linear_tuned_lens"}
+{"base_model_name_or_path": "EleutherAI/gpt-neox-20b", "d_model": 6144, "num_hidden_layers": 44, "bias": true, "base_model_revision": "4e49eadb5d14bd22f314ec3f45b69a87b88c7691", "unemebd_hash": "323d4c731c33556e143503e3be913c109ead330080b4065552be97000c19ed67", "lens_type": "linear_tuned_lens"}
lens/{EleutherAI/gpt-neox-20b → gpt-neox-20b}/params.pt
RENAMED
File without changes
lens/gpt2-large/config.json
CHANGED
@@ -1 +1 @@
-{"base_model_name_or_path": "gpt2-large", "d_model": 1280, "num_hidden_layers": 36, "bias": true, "base_model_revision": "212095d5832abbf9926672e1c1e8d14312a3be20", "lens_type": "linear_tuned_lens"}
+{"base_model_name_or_path": "gpt2-large", "d_model": 1280, "num_hidden_layers": 36, "bias": true, "base_model_revision": "212095d5832abbf9926672e1c1e8d14312a3be20", "unemebd_hash": "9b7da774c0a326716dca888539370ddff25804795949e5ace65ef9f761f47397", "lens_type": "linear_tuned_lens"}
lens/gpt2-xl/config.json
CHANGED
@@ -1 +1 @@
-{"base_model_name_or_path": "gpt2-xl", "d_model": 1600, "num_hidden_layers": 48, "bias": true, "base_model_revision": "33cdb5c0db5423c1879b1b9f16c352988e8754a8", "lens_type": "linear_tuned_lens"}
+{"base_model_name_or_path": "gpt2-xl", "d_model": 1600, "num_hidden_layers": 48, "bias": true, "base_model_revision": "33cdb5c0db5423c1879b1b9f16c352988e8754a8", "unemebd_hash": "70bf58a8cf7964b39530e30fdaebb89de39489546244437b1ed56fb81bd4c746", "lens_type": "linear_tuned_lens"}
lens/gpt2/config.json
CHANGED
@@ -1 +1 @@
-{"base_model_name_or_path": "gpt2", "d_model": 768, "num_hidden_layers": 12, "bias": true, "base_model_revision": "e7da7f221d5bf496a48136c0cd264e630fe9fcc8", "lens_type": "linear_tuned_lens"}
+{"base_model_name_or_path": "gpt2", "d_model": 768, "num_hidden_layers": 12, "bias": true, "base_model_revision": "e7da7f221d5bf496a48136c0cd264e630fe9fcc8", "unemebd_hash": "608e50247f57691c90453601e854f2287141e4db9cba436af0b0186003e2daae", "lens_type": "linear_tuned_lens"}
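The gpt2-family configs above (and the renamed gpt-neox-20b config) gain an unembedding hash alongside the existing metadata, presumably so a loader can check that a lens matches its base model's unembedding. The helper below is a hypothetical illustration, not code from this repo; it only reads the fields visible in the JSON above.

# Hypothetical helper, for illustration only: inspect one of the lens config.json files above.
import json
from pathlib import Path

def describe_lens_config(path: str) -> None:
    cfg = json.loads(Path(path).read_text())
    print("base model  :", cfg["base_model_name_or_path"])
    print("d_model     :", cfg["d_model"])
    print("layers      :", cfg["num_hidden_layers"])
    print("lens type   :", cfg["lens_type"])
    # The updated configs spell the key "unemebd_hash"; the deleted ones used "unembed_hash".
    print("unembed hash:", cfg.get("unemebd_hash") or cfg.get("unembed_hash"))

describe_lens_config("lens/gpt2/config.json")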
lens/lmsys/vicuna-13b-v1.1/config.json
DELETED
@@ -1 +0,0 @@
-{"base_model_name_or_path": "vicuna-original-13b", "d_model": 5120, "num_hidden_layers": 40, "bias": true, "base_model_revision": null, "unembed_hash": "86cefdfd94bb3da225b405dd1328136786f6177b03d82f90e5c734d23c47e8ca", "lens_type": "linear_tuned_lens"}
lens/lmsys/vicuna-13b-v1.1/params.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:0281f46b65b728cb390e41bd5848d8ab635b2847b48c080b2de2b78627baa40d
-size 2097581027
lens/meta-llama/Llama-2-13b-chat-hf/config.json
DELETED
@@ -1 +0,0 @@
-{"base_model_name_or_path": "meta-llama/Llama-2-13b-chat-hf", "d_model": 5120, "num_hidden_layers": 40, "bias": true, "base_model_revision": null, "unembed_hash": "05e6ab11c049cc1356b38bf9ff84ffbedaf802a3c7fdda9e763bca0c4e9de2ab", "lens_type": "linear_tuned_lens"}
lens/meta-llama/Llama-2-13b-chat-hf/hh-rlhf/config.json
DELETED
@@ -1 +0,0 @@
-{"base_model_name_or_path": "meta-llama/Llama-2-13b-chat-hf", "d_model": 5120, "num_hidden_layers": 40, "bias": true, "base_model_revision": null, "unembed_hash": "05e6ab11c049cc1356b38bf9ff84ffbedaf802a3c7fdda9e763bca0c4e9de2ab", "lens_type": "linear_tuned_lens"}
lens/meta-llama/Llama-2-13b-chat-hf/hh-rlhf/params.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:6ceabb160e1b3ff887232514159bdf22c7f109a585759284eb7b18e72fb27667
-size 2097581027
lens/meta-llama/Llama-2-13b-chat-hf/params.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:6a8af029394d9be03e6a9a75a03ac477fdb9e42352ad3df035439bbb8ccb1b5d
-size 2097581027
lens/meta-llama/Llama-2-13b-hf/config.json
DELETED
@@ -1 +0,0 @@
-{"base_model_name_or_path": "meta-llama/Llama-2-13b-hf", "d_model": 5120, "num_hidden_layers": 40, "bias": true, "base_model_revision": null, "unembed_hash": "1f421a88f21fcda6d1c8c17a481c46918d355f8b06cfc77b05cf738e11b55f7a", "lens_type": "linear_tuned_lens"}
lens/meta-llama/Llama-2-13b-hf/params.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:f87b2a5f2b3125d9e49343cbac244b901795e3af4117ec0f6ee428f79027778d
-size 2097581027
lens/meta-llama/Llama-2-7b-chat-hf/config.json
DELETED
@@ -1 +0,0 @@
-{"base_model_name_or_path": "meta-llama/Llama-2-7b-chat-hf", "d_model": 4096, "num_hidden_layers": 32, "bias": true, "base_model_revision": null, "unembed_hash": "3313b62209f366e7b19233ae7068501553350611c20f96caf2499c0486f8836b", "lens_type": "linear_tuned_lens"}
lens/meta-llama/Llama-2-7b-chat-hf/hh-rlhf/config.json
DELETED
@@ -1 +0,0 @@
-{"base_model_name_or_path": "meta-llama/Llama-2-7b-chat-hf", "d_model": 4096, "num_hidden_layers": 32, "bias": true, "base_model_revision": null, "unembed_hash": "3313b62209f366e7b19233ae7068501553350611c20f96caf2499c0486f8836b", "lens_type": "linear_tuned_lens"}
lens/meta-llama/Llama-2-7b-chat-hf/hh-rlhf/params.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:86abc8c2a2044c3a70e88291b636222a6db6cdb25af51e86108e479843a2326e
-size 2148023459
lens/meta-llama/Llama-2-7b-chat-hf/params.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:c66e7d56afd044bc6472e91bf6103ee5ab837c5953b44c791b0f1f8ac8ce8578
-size 2148023459
lens/meta-llama/Llama-2-7b-hf/config.json
DELETED
@@ -1 +0,0 @@
-{"base_model_name_or_path": "meta-llama/Llama-2-7b-hf", "d_model": 4096, "num_hidden_layers": 32, "bias": true, "base_model_revision": null, "unembed_hash": "3313b62209f366e7b19233ae7068501553350611c20f96caf2499c0486f8836b", "lens_type": "linear_tuned_lens"}