# NOTE: web-scrape residue removed here (GitHub file-view chrome and the
# copied line-number gutter 1-97). Original file: experiment.py,
# 97 lines (80 loc), 2.41 KB.
# %%
from typing import List
import torch
from transformer_lens import HookedTransformer
from utils import (
docstring_metric,
ioi_metric,
kl_on_last_token,
layer_level_connectom,
sankey_diagram_of_connectome,
)
# %%
# Load GPT-2 small as the base model for the IOI (indirect object
# identification) experiments below.
model = HookedTransformer.from_pretrained("gpt2-small")
# %%
# Classic IOI prompt: the model should complete with " Mary" (the indirect
# object), not " John" (the repeated subject).
prompt = "When Mary and John went to the store, John gave a drink to"
# %%
# Token ids for the subject (S) and indirect object (IO); leading spaces
# matter for GPT-2's tokenizer.
s_token_id = model.to_single_token(" John")
io_token_id = model.to_single_token(" Mary")
# Connectome scored by the IOI logit-difference metric.
results = layer_level_connectom(model, prompt, ioi_metric(s_token_id, io_token_id), threshold=0.75)
# %%
# NOTE(review): this cell overwrites the IOI-metric `results` from the
# previous cell before it is visualized — presumably intentional notebook
# exploration (re-running with a KL-based metric); confirm.
results = layer_level_connectom(model, prompt, kl_on_last_token, threshold=0.05)
# %%
# Visualize whichever connectome `results` currently holds (the KL one,
# if cells are run top-to-bottom).
sankey_diagram_of_connectome(model, prompt, results)
# %%
docstring_prompt1 = '''def old(self, first, files, page, names, size, read):
"""sector gap population
:param page: message tree
:param names: detail mine
:param'''
correct_param1 = " size"
incorrect_params1 = [" self", " first", " files", " page", " names", " read"]
docstring_prompt2 = '''def port(self, load, size, file, last):
"""oil column piece
:param load: crime population
:param size: unit dark
:param'''
correct_param2 = " file"
incorrect_params2 = [" self", " load", " size", " last"]
# %%
four_layer_attn_only = HookedTransformer.from_pretrained("attn-only-4l")
# %%
def get_model_completions(prompt, model=None, k=10):
    """Print a model's top-k next-token completions for ``prompt``.

    :param prompt: text to feed the model.
    :param model: HookedTransformer to query; defaults to the module-level
        ``four_layer_attn_only`` model (preserves the original behavior).
    :param k: number of top candidates to show (previously hard-coded to 10).
    """
    if model is None:
        # Fall back to the module-level model the original version used.
        model = four_layer_attn_only
    logits = model(prompt)
    # logits[0, -1]: final-position logits for the (single) batch element.
    _, indices = torch.topk(logits[0, -1], k)
    print(model.to_str_tokens(indices))
# %%
# Connectome for the docstring task via the generic KL metric.
# NOTE(review): this uses `model` (gpt2-small) rather than
# `four_layer_attn_only`, unlike the docstring cell at the bottom of the
# file — confirm whether that is intentional.
docstring_results = layer_level_connectom(model, docstring_prompt1, kl_on_last_token, threshold=0.2)
# %%
def map_connectome_for_docstring_task(
    model: HookedTransformer,
    prompt: str,
    correct_param: str,
    incorrect_param: List[str],
    threshold: float = 1.0,
):
    """Compute and plot the layer-level connectome for a docstring task.

    Scores the model with ``docstring_metric`` (correct vs. incorrect
    parameter-name logits) and renders the connectome as a Sankey diagram.

    :param model: model to analyse.
    :param prompt: truncated docstring prompt ending just before the answer.
    :param correct_param: parameter name the model should predict
        (must tokenize to a single token, leading space included).
    :param incorrect_param: distractor parameter names (single tokens each).
    :param threshold: attribution threshold forwarded to
        ``layer_level_connectom``.
    :return: the connectome results (previously discarded), so callers can
        inspect them beyond the plot.
    """
    # Resolve answer/distractor strings to token ids up front.
    correct_param_id = int(model.to_single_token(correct_param))
    incorrect_param_ids = [int(model.to_single_token(token)) for token in incorrect_param]
    docstring_results = layer_level_connectom(
        model,
        prompt,
        docstring_metric(correct_param_id, incorrect_param_ids),
        threshold=threshold,
    )
    sankey_diagram_of_connectome(model, prompt, docstring_results)
    return docstring_results
# %%
# Docstring connectome for prompt 1 on gpt2-small, default threshold.
# NOTE(review): the second cell below runs the attn-only model instead —
# presumably a deliberate model comparison; confirm.
map_connectome_for_docstring_task(
    model,
    docstring_prompt1,
    correct_param1,
    incorrect_params1,
)
# %%
# Docstring connectome for prompt 2 on the 4-layer attention-only model,
# with a higher attribution threshold.
map_connectome_for_docstring_task(
    four_layer_attn_only,
    docstring_prompt2,
    correct_param2,
    incorrect_params2,
    threshold=1.5,
)
# %%