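"""
DreamCoder driver for the text-editing (string manipulation / SyGuS PBE) domain.

Builds the text tasks and the starting grammar, defines the learned feature
extractor used by the recognition model, and either runs the usual EC
iterations or a simulated SyGuS competition restored from saved checkpoints.
"""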
from dreamcoder.dreamcoder import ecIterator
from dreamcoder.domains.text.makeTextTasks import makeTasks, loadPBETasks
from dreamcoder.domains.text.textPrimitives import primitives
from dreamcoder.domains.list.listPrimitives import bootstrapTarget
from dreamcoder.enumeration import *
import os
import datetime
import random
import pickle
from functools import reduce
import dill
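
# Expression visitor that replaces the "STRING" placeholder primitive with a
# concrete string constant chosen at random from the task corpus; used when
# turning enumerated/dreamed programs back into concrete tasks (taskOfProgram).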
class ConstantInstantiateVisitor(object):
def __init__(self, words):
self.words = words
def primitive(self, e):
if e.name == "STRING":
return Primitive("STRING", e.tp, random.choice(self.words))
return e
def invented(self, e): return e.body.visit(self)
def index(self, e): return e
def application(self, e):
return Application(e.f.visit(self), e.x.visit(self))
def abstraction(self, e):
return Abstraction(e.body.visit(self))
try:
from dreamcoder.recognition import *
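    # Recurrent feature extractor over a text task's I/O examples, used by the
    # recognition model.  The lexicon is the set of characters appearing in any
    # input or output; MAXINPUTS caps how many examples are fed to the network.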
class LearnedFeatureExtractor(RecurrentFeatureExtractor):
special = 'string'
def tokenize(self, examples):
def tokenize_example(xs,y):
if not isinstance(y, list): y = [y]
return xs,y
return [tokenize_example(*e) for e in examples]
def __init__(self, tasks, testingTasks=[], cuda=False):
lexicon = {c
for t in tasks + testingTasks
for xs, y in self.tokenize(t.examples)
for c in reduce(lambda u, v: u + v, list(xs) + [y])}
self.recomputeTasks = True
super(LearnedFeatureExtractor, self).__init__(lexicon=list(lexicon),
H=64,
tasks=tasks,
bidirectional=True,
cuda=cuda)
self.MAXINPUTS = 8
def taskOfProgram(self, p, tp):
# Instantiate STRING w/ random words
p = p.visit(ConstantInstantiateVisitor.SINGLE)
return super(LearnedFeatureExtractor, self).taskOfProgram(p, tp)
except ImportError:
    # The recognition model (and its torch dependency) is optional; without it
    # we fall back to plain enumeration from the grammar.
    pass
### COMPETITION CODE
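# Attempt to solve a single challenge task within `timeout` seconds, using the
# checkpoint's recognition model if one was trained, and otherwise enumerating
# from the final learned grammar.  Returns (search time or None, task).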
def competeOnOneTask(checkpoint, task,
CPUs=8, timeout=3600, evaluationTimeout=0.0005):
if checkpoint.recognitionModel is not None:
recognizer = checkpoint.recognitionModel
challengeFrontiers, times, bestSearchTime = \
recognizer.enumerateFrontiers([task],
CPUs=CPUs,
maximumFrontier=1,
enumerationTimeout=timeout,
evaluationTimeout=evaluationTimeout)
else:
        challengeFrontiers, times, bestSearchTime = \
multicoreEnumeration(checkpoint.grammars[-1], [task],
CPUs=CPUs,
maximumFrontier=1,
enumerationTimeout=timeout,
evaluationTimeout=evaluationTimeout)
if len(times) == 0: return None, task
assert len(times) == 1
return times[0], task
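# Simulated SyGuS competition: every checkpoint in the ensemble attempts every
# task in parallel, each task is credited with the best (smallest) search time
# achieved by any checkpoint, and the results are pickled to experimentOutputs/.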
def sygusCompetition(checkpoints, tasks):
    from pathos.multiprocessing import Pool
# map from task to list of search times, one for each checkpoint.
# search time will be None if it is not solved
searchTimes = {t: [] for t in tasks}
    # Guard against zero CPUs/workers when many checkpoints are ensembled.
    CPUs = max(1, int(8 / len(checkpoints)))
    maxWorkers = max(1, int(numberOfCPUs() / CPUs))
workers = Pool(maxWorkers)
eprint(f"You gave me {len(checkpoints)} checkpoints to ensemble. Each checkpoint will get {CPUs} CPUs. Creating a pool of {maxWorkers} worker processes.")
timeout = 3600
promises = []
for t in tasks:
for checkpoint in checkpoints:
promise = workers.apply_async(competeOnOneTask,
(checkpoint,t),
{"CPUs": CPUs,
"timeout": timeout})
promises.append(promise)
eprint(f"Queued {len(promises)} jobs.")
for promise in promises:
dt, task = promise.get()
if dt is not None:
searchTimes[task].append(dt)
searchTimes = {t: min(ts) if len(ts) > 0 else None
for t,ts in searchTimes.items()}
fn = "experimentOutputs/text_competition_%s.p"%(datetime.datetime.now().isoformat())
with open(fn,"wb") as handle:
pickle.dump(searchTimes, handle)
eprint()
hits = sum( t is not None for t in searchTimes.values() )
total = len(searchTimes)
percentage = 100*hits/total
eprint("Hits %d/%d = %f\n"%(hits, total, percentage))
eprint()
eprint("Exported competition results to",fn)
def text_options(parser):
parser.add_argument(
"--showTasks",
action="store_true",
default=False,
help="show the training test and challenge tasks and then exit")
parser.add_argument(
"--trainChallenge",
action="store_true",
default=False,
help="Incorporate a random 50% of the challenge problems into the training set")
parser.add_argument(
"--onlyChallenge",
action="store_true",
default=False,
help="Only train on challenge problems and have testing problems.")
parser.add_argument(
"--latest",
action="store_true",
default=False,
help="evaluate on latest sygus problems rather than problems used in ec2 paper")
parser.add_argument(
"--noMap", action="store_true", default=False,
help="Disable built-in map primitive")
parser.add_argument(
"--noLength", action="store_true", default=False,
help="Disable built-in length primitive")
parser.add_argument(
"--noUnfold", action="store_true", default=False,
help="Disable built-in unfold primitive")
parser.add_argument(
"--compete",
nargs='+',
default=None,
type=str,
help="Do a simulated sygus competition (1hr+8cpus/problem) on the sygus tasks, restoring from provided checkpoint(s). If multiple checkpoints are provided, then we ensemble the models.")
def main(arguments):
"""
Takes the return value of the `commandlineArguments()` function as input and
trains/tests the model on manipulating sequences of text.
"""
tasks = makeTasks()
eprint("Generated", len(tasks), "tasks")
for t in tasks:
t.mustTrain = False
test, train = testTrainSplit(tasks, 1.)
eprint("Split tasks into %d/%d test/train" % (len(test), len(train)))
latest = arguments.pop("latest")
challenge, challengeCheating = loadPBETasks("data/sygus" if latest else "PBE_Strings_Track")
eprint("Got %d challenge PBE tasks" % len(challenge))
if arguments.pop('trainChallenge'):
challengeTest, challengeTrain = testTrainSplit(challenge, 0.5)
challenge = challengeTest
train += challengeTrain
eprint(
"Incorporating %d (50%%) challenge problems into the training set." %
(len(challengeTrain)),
"We will evaluate on the held out challenge problems.",
"This makes a total of %d training problems." %
len(train))
if arguments.pop('onlyChallenge'):
train = challenge
test = []
challenge = []
eprint("Training only on sygus problems.")
    ConstantInstantiateVisitor.SINGLE = \
        ConstantInstantiateVisitor([list(s)
                                    for s in {tuple(s)
                                              for t in test + train + challenge
                                              for s in t.stringConstants}])
haveLength = not arguments.pop("noLength")
haveMap = not arguments.pop("noMap")
haveUnfold = not arguments.pop("noUnfold")
eprint(f"Including map as a primitive? {haveMap}")
eprint(f"Including length as a primitive? {haveLength}")
eprint(f"Including unfold as a primitive? {haveUnfold}")
baseGrammar = Grammar.uniform(primitives + [p
for p in bootstrapTarget()
if (p.name != "map" or haveMap) and \
(p.name != "unfold" or haveUnfold) and \
(p.name != "length" or haveLength)])
challengeGrammar = baseGrammar # Grammar.uniform(targetTextPrimitives)
evaluationTimeout = 0.0005
# We will spend 10 minutes on each challenge problem
challengeTimeout = 10 * 60
for t in train + test + challenge:
t.maxParameters = 2
if arguments.pop("showTasks"):
for source, ts in [("train",tasks),("test",test),("challenge",challenge)]:
print(source,"tasks:")
for t in ts:
print(t.name)
for xs, y in t.examples:
xs = ['"' + "".join(x) + '"' for x in xs]
y = "".join(y) if isinstance(y,list) else y
print('f(%s) = "%s"' % (", ".join(xs), y))
print("\t{%s}" % (t.stringConstants))
print()
sys.exit(0)
competitionCheckpoints = arguments.pop("compete")
if competitionCheckpoints:
checkpoints = []
for competitionCheckpoint in competitionCheckpoints:
with open(competitionCheckpoint, 'rb') as handle:
checkpoints.append(dill.load(handle))
sygusCompetition(checkpoints, challenge)
sys.exit(0)
timestamp = datetime.datetime.now().isoformat()
outputDirectory = "experimentOutputs/text/%s"%timestamp
os.system("mkdir -p %s"%outputDirectory)
generator = ecIterator(baseGrammar, train,
testingTasks=test + challenge,
outputPrefix="%s/text"%outputDirectory,
evaluationTimeout=evaluationTimeout,
**arguments)
for result in generator:
pass