MilesCranmer committed
Commit f041de4
1 Parent(s): 5276a81

Add weight optimize and adaptive parsimony scaling

Files changed (3)
  1. docs/param_groupings.yml +2 -0
  2. pysr/sr.py +19 -0
  3. pysr/version.py +2 -2
docs/param_groupings.yml CHANGED
@@ -22,6 +22,7 @@
   - warmup_maxsize_by
   - use_frequency
   - use_frequency_in_tournament
+  - adaptive_parsimony_scaling
   - Mutations:
     - weight_add_node
     - weight_insert_node
@@ -31,6 +32,7 @@
     - weight_mutate_operator
     - weight_randomize
     - weight_simplify
+    - weight_optimize
   - crossover_probability
   - annealing
   - alpha
pysr/sr.py CHANGED
@@ -359,6 +359,13 @@ class PySRRegressor(MultiOutputMixin, RegressorMixin, BaseEstimator):
         Whether to use the frequency mentioned above in the tournament,
         rather than just the simulated annealing.
         Default is `True`.
+    adaptive_parsimony_scaling : float
+        If the adaptive parsimony strategy (`use_frequency` and
+        `use_frequency_in_tournament`), this is how much to (exponentially)
+        weight the contribution. If you find that the search is only optimizing
+        the most complex expressions while the simpler expressions remain stagnant,
+        you should increase this value.
+        Default is `20.0`.
     alpha : float
         Initial temperature for simulated annealing
         (requires `annealing` to be `True`).
@@ -408,6 +415,12 @@ class PySRRegressor(MultiOutputMixin, RegressorMixin, BaseEstimator):
     weight_simplify : float
        Relative likelihood for mutation to simplify constant parts by evaluation
        Default is `0.0020`.
+    weight_optimize: float
+        Constant optimization can also be performed as a mutation, in addition to
+        the normal strategy controlled by `optimize_probability` which happens
+        every iteration. Using it as a mutation is useful if you want to use
+        a large `ncyclesperiteration`, and may not optimize very often.
+        Default is `0.0`.
     crossover_probability : float
         Absolute probability of crossover-type genetic operation, instead of a mutation.
         Default is `0.066`.
@@ -664,6 +677,7 @@ class PySRRegressor(MultiOutputMixin, RegressorMixin, BaseEstimator):
         parsimony=0.0032,
         use_frequency=True,
         use_frequency_in_tournament=True,
+        adaptive_parsimony_scaling=20.0,
         alpha=0.1,
         annealing=False,
         early_stop_condition=None,
@@ -678,6 +692,7 @@ class PySRRegressor(MultiOutputMixin, RegressorMixin, BaseEstimator):
         weight_mutate_operator=0.47,
         weight_randomize=0.00023,
         weight_simplify=0.0020,
+        weight_optimize=0.0,
         crossover_probability=0.066,
         skip_mutation_failures=True,
         migration=True,
@@ -748,6 +763,7 @@ class PySRRegressor(MultiOutputMixin, RegressorMixin, BaseEstimator):
         self.parsimony = parsimony
         self.use_frequency = use_frequency
         self.use_frequency_in_tournament = use_frequency_in_tournament
+        self.adaptive_parsimony_scaling = adaptive_parsimony_scaling
         self.alpha = alpha
         self.annealing = annealing
         # - Evolutionary search parameters
@@ -760,6 +776,7 @@ class PySRRegressor(MultiOutputMixin, RegressorMixin, BaseEstimator):
         self.weight_mutate_operator = weight_mutate_operator
         self.weight_randomize = weight_randomize
         self.weight_simplify = weight_simplify
+        self.weight_optimize = weight_optimize
         self.crossover_probability = crossover_probability
         self.skip_mutation_failures = skip_mutation_failures
         # -- Migration parameters
@@ -1534,6 +1551,7 @@ class PySRRegressor(MultiOutputMixin, RegressorMixin, BaseEstimator):
             simplify=self.weight_simplify,
             randomize=self.weight_randomize,
             do_nothing=self.weight_do_nothing,
+            optimize=self.weight_optimize,
         )

         # Call to Julia backend.
@@ -1569,6 +1587,7 @@ class PySRRegressor(MultiOutputMixin, RegressorMixin, BaseEstimator):
             warmup_maxsize_by=self.warmup_maxsize_by,
             use_frequency=self.use_frequency,
             use_frequency_in_tournament=self.use_frequency_in_tournament,
+            adaptive_parsimony_scaling=self.adaptive_parsimony_scaling,
             npop=self.population_size,
             ncycles_per_iteration=self.ncyclesperiteration,
             fraction_replaced=self.fraction_replaced,
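
For context on the new `adaptive_parsimony_scaling` parameter documented above: a minimal usage sketch is shown below, assuming PySR at or above the version bumped in this commit. The toy dataset, operator choices, and the value `100.0` are illustrative and not part of the commit itself.

```python
import numpy as np
from pysr import PySRRegressor

# Toy data (illustrative): y = 2.5 * cos(x3) + x0^2 - 0.5
rng = np.random.default_rng(0)
X = rng.normal(size=(100, 5))
y = 2.5 * np.cos(X[:, 3]) + X[:, 0] ** 2 - 0.5

model = PySRRegressor(
    binary_operators=["+", "-", "*"],
    unary_operators=["cos"],
    use_frequency=True,                # adaptive parsimony strategy ...
    use_frequency_in_tournament=True,  # ... as described in the docstring above
    adaptive_parsimony_scaling=100.0,  # raised from the default 20.0 (illustrative value)
    niterations=40,
)
model.fit(X, y)
print(model)
```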
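
Likewise for `weight_optimize`, which the docstring above recommends for runs with a large `ncyclesperiteration`, where the per-iteration constant optimization controlled by `optimize_probability` fires rarely relative to the number of mutations. A sketch under that assumption; the values `10_000` and `0.001` are illustrative guesses, not tuned defaults.

```python
import numpy as np
from pysr import PySRRegressor

# Toy data (illustrative)
X = np.random.randn(100, 3)
y = 3.2 * X[:, 0] - 1.7 * X[:, 1]

model = PySRRegressor(
    binary_operators=["+", "-", "*", "/"],
    ncyclesperiteration=10_000,  # long inner loop between outer iterations (illustrative)
    optimize_probability=0.14,   # the usual once-per-iteration constant optimization
    weight_optimize=0.001,       # also allow constant optimization as a mutation (illustrative value)
    niterations=20,
)
model.fit(X, y)
```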
pysr/version.py CHANGED
@@ -1,2 +1,2 @@
-__version__ = "0.11.6"
-__symbolic_regression_jl_version__ = "0.14.0"
+__version__ = "0.11.7"
+__symbolic_regression_jl_version__ = "0.14.2"