Commit 16c9195 by MilesCranmer
Parent: 85618f5

Add mutation to generate random tree

Files changed (4):
  1. README.md +12 -5
  2. eureqa.jl +3 -1
  3. hyperparams.jl +2 -0
  4. paralleleureqa.jl +4 -3
README.md CHANGED
@@ -68,21 +68,28 @@ Larger alpha means more exploration.
 
 One can also adjust the relative probabilities of each operation here:
 ```julia
-weights = [8, 1, 1, 1, 0.1, 2]
+weights = [8, 1, 1, 1, 0.1, 0.5, 2]
 ```
-(for: 1. perturb constant, 2. mutate operator,
-3. append a node, 4. delete a subtree, 5. simplify equation,
-6. do nothing).
+for:
+
+1. Perturb constant
+2. Mutate operator
+3. Append a node
+4. Delete a subtree
+5. Simplify equation
+6. Randomize completely
+7. Do nothing
 
 
 # TODO
 
 - [ ] Hyperparameter tune
+- [ ] Add mutation for constant<->variable
 - [ ] Create a Python interface
 - [ ] Create a benchmark for accuracy
 - [ ] Create struct to pass through all hyperparameters, instead of treating as constants
     - Make sure doesn't affect performance
-- [ ] Use NN to generate weights over all probability distribution, and train on some randomly-generated equations
+- [ ] Use NN to generate weights over all probability distribution conditional on error and existing equation, and train on some randomly-generated equations
 - [ ] Performance:
     - [ ] Use an enum for functions instead of storing them?
     - Current most expensive operations:
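The seven weights above are relative, not normalized. As a quick illustration (not code from the repository; `mutation_names` is just a label list for this sketch), here is how they translate into per-mutation probabilities:

```julia
# Hypothetical illustration: convert the README's relative weights into
# the probability of each mutation being chosen on a given iteration.
weights = [8, 1, 1, 1, 0.1, 0.5, 2]
mutation_names = ["perturb constant", "mutate operator", "append node",
                  "delete subtree", "simplify", "randomize", "do nothing"]

probs = weights ./ sum(weights)   # normalize so the probabilities sum to 1
for (name, p) in zip(mutation_names, probs)
    println(rpad(name, 18), round(p, digits=3))
end
# e.g. "perturb constant" gets 8 / 13.6 ≈ 0.588 of the probability mass
```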
eureqa.jl CHANGED
@@ -334,7 +334,7 @@ function iterate(
 
     mutationChoice = rand()
     weight_for_constant = min(8, countConstants(tree))
-    weights = [weight_for_constant, 1, 1, 1, 0.1, 2] .* 1.0
+    weights = [weight_for_constant, 1, 1, 1, 0.1, 0.5, 2] .* 1.0
     weights /= sum(weights)
     cweights = cumsum(weights)
     n = countNodes(tree)
@@ -350,6 +350,8 @@ function iterate(
     elseif mutationChoice < cweights[5]
         tree = simplifyTree(tree) # Sometimes we simplify tree
         return tree
+    elseif mutationChoice < cweights[6]
+        tree = genRandomTree(5) # Sometimes we replace the tree with a fresh random one
     else
         return tree
     end
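For reference, the `cweights` chain in `iterate` is ordinary cumulative-weight sampling: draw a uniform number and take the first branch whose cumulative weight exceeds it. A self-contained sketch of the same idea (the helper `pick_mutation` is illustrative, not part of eureqa.jl):

```julia
# Illustrative cumulative-weight sampler mirroring the elseif chain in iterate():
# draw u ~ U(0,1) and return the index of the first cumulative weight above it.
function pick_mutation(weights::Vector{Float64})
    cweights = cumsum(weights ./ sum(weights))
    u = rand()
    # `something` guards against the rare case where rounding leaves u above the last entry
    return something(findfirst(c -> u < c, cweights), length(weights))
end

# With the new weight vector, index 6 ("randomize completely") fires with
# probability 0.5 / 13.6 ≈ 3.7% per call.
counts = zeros(Int, 7)
for _ in 1:100_000
    counts[pick_mutation([8, 1, 1, 1, 0.1, 0.5, 2] .* 1.0)] += 1
end
println(counts ./ 100_000)
```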
hyperparams.jl CHANGED
@@ -26,6 +26,8 @@ const migration = true
 const hofMigration = true
 # Fraction of population to replace with hall of fame
 const fractionReplacedHof = 0.1f0
+# Optimize constants
+const shouldOptimizeConstants = true
 ##################
 
 
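Because `shouldOptimizeConstants` is a `const` global Bool, Julia can typically fold the `if shouldOptimizeConstants` branch away when compiling callers, so disabling the feature costs nothing at runtime. A small sketch of the pattern (the `maybe_optimize` function below is hypothetical, not part of the repository):

```julia
# Sketch of gating an expensive step behind a const flag, as hyperparams.jl does.
const shouldOptimizeConstants = true

function maybe_optimize(x)
    if shouldOptimizeConstants   # resolvable at compile time for a const global
        return x .+ 1.0          # stand-in for the real optimizeConstants call
    end
    return x
end
```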
paralleleureqa.jl CHANGED
@@ -35,8 +35,10 @@ function fullRun(niterations::Integer;
     @inbounds Threads.@threads for i=1:nthreads
         allPops[i] = run(allPops[i], ncyclesperiteration, annealing, verbosity=verbosity)
         bestSubPops[i] = bestSubPop(allPops[i], topn=topn)
-        for j=1:bestSubPops[i].n
-            bestSubPops[i].members[j] = optimizeConstants(bestSubPops[i].members[j])
+        if shouldOptimizeConstants
+            for j=1:bestSubPops[i].n
+                bestSubPops[i].members[j] = optimizeConstants(bestSubPops[i].members[j])
+            end
         end
     end
 
@@ -49,7 +51,6 @@ function fullRun(niterations::Integer;
         size = countNodes(member.tree)
         if member.score < hallOfFame.members[size].score
             hallOfFame.members[size] = deepcopy(member)
-            #hallOfFame.members[size] = optimizeConstants(hallOfFame.members[size])
             hallOfFame.exists[size] = true
         end
     end
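The second hunk's hall-of-fame update keeps one best member per equation size. A minimal standalone sketch of that bookkeeping, assuming hypothetical `Member` and `HallOfFame` types (the real ones live elsewhere in the repository):

```julia
# Hypothetical stand-ins for the repository's population member and hall-of-fame types.
struct Member
    tree::String        # placeholder for the real expression-tree type
    score::Float64
end

mutable struct HallOfFame
    members::Vector{Member}
    exists::Vector{Bool}
end

# One slot per equation size; seed with Inf scores so any real member wins the first comparison.
HallOfFame(maxsize::Int) =
    HallOfFame([Member("", Inf) for _ in 1:maxsize], fill(false, maxsize))

# Mirrors the update in fullRun: replace the slot for this size only on a strictly better score.
function update!(hof::HallOfFame, member::Member, size::Int)
    if member.score < hof.members[size].score
        hof.members[size] = deepcopy(member)
        hof.exists[size] = true
    end
    return hof
end
```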