MilesCranmer committed
Commit 4c9fe98
1 Parent(s): fb2f513

Add early stop conditions to force speed testing

Files changed (1):
  1. test/test.py +25 -5
test/test.py CHANGED
@@ -30,14 +30,22 @@ class TestPipeline(unittest.TestCase):
 
     def test_linear_relation(self):
         y = self.X[:, 0]
-        model = PySRRegressor(**self.default_test_kwargs)
+        model = PySRRegressor(
+            **self.default_test_kwargs,
+            early_stop_condition="stop_if(loss, complexity) = loss < 1e-4 && complexity == 1",
+        )
         model.fit(self.X, y)
         print(model.equations_)
         self.assertLessEqual(model.get_best()["loss"], 1e-4)
 
     def test_multiprocessing(self):
         y = self.X[:, 0]
-        model = PySRRegressor(**self.default_test_kwargs, procs=2, multithreading=False)
+        model = PySRRegressor(
+            **self.default_test_kwargs,
+            procs=2,
+            multithreading=False,
+            early_stop_condition="stop_if(loss, complexity) = loss < 1e-4 && complexity == 1",
+        )
         model.fit(self.X, y)
         print(model.equations_)
         self.assertLessEqual(model.equations_.iloc[-1]["loss"], 1e-4)
@@ -55,6 +63,7 @@ class TestPipeline(unittest.TestCase):
             # Test custom operators with constraints:
             nested_constraints={"square_op": {"square_op": 3}},
             constraints={"square_op": 10},
+            early_stop_condition="stop_if(loss, complexity) = loss < 1e-4 && complexity == 3",
         )
         model.fit(self.X, y)
         equations = model.equations_
@@ -95,6 +104,7 @@ class TestPipeline(unittest.TestCase):
             procs=0,
             temp_equation_file=True,
             delete_tempfiles=False,
+            early_stop_condition="stop_if(loss, complexity) = loss < 1e-4 && complexity == 2",
         )
         model.fit(X.copy(), y, weights=w)
 
@@ -124,6 +134,7 @@ class TestPipeline(unittest.TestCase):
             unary_operators=[],
             binary_operators=["plus"],
             **self.default_test_kwargs,
+            early_stop_condition="stop_if(loss, complexity) = loss < 1e-4 && complexity == 3",
         )
         self.assertTrue("None" in regressor.__repr__())
         regressor.fit(X, y)
@@ -134,7 +145,7 @@ class TestPipeline(unittest.TestCase):
         np.testing.assert_almost_equal(regressor.predict(X), y, decimal=1)
 
         # Test if repeated fit works:
-        regressor.set_params(niterations=0, warm_start=True)
+        regressor.set_params(niterations=0, warm_start=True, early_stop_condition=None)
         # This should exit immediately, and use the old equations
         regressor.fit(X, y)
 
@@ -155,11 +166,18 @@ class TestPipeline(unittest.TestCase):
             unary_operators="sq(x) = x^2",
             binary_operators="plus",
             extra_sympy_mappings={"sq": lambda x: x**2},
-            **self.default_test_kwargs,
+            **{
+                k: v
+                for k, v in self.default_test_kwargs.items()
+                if k != "model_selection"
+            },
             procs=0,
             denoise=True,
+            early_stop_condition="stop_if(loss, complexity) = loss < 0.05 && complexity == 2",
+            model_selection="best",
         )
         model.fit(self.X, y)
+        print(model)
         self.assertLessEqual(model.get_best()[1]["loss"], 1e-2)
         self.assertLessEqual(model.get_best()[1]["loss"], 1e-2)
 
@@ -191,6 +209,7 @@ class TestPipeline(unittest.TestCase):
             **self.default_test_kwargs,
             denoise=True,
             nested_constraints={"/": {"+": 1, "-": 1}, "+": {"*": 4}},
+            early_stop_condition="stop_if(loss, complexity) = loss < 1e-3 && complexity == 7",
         )
         model.fit(X, y, Xresampled=Xresampled)
         self.assertNotIn("unused_feature", model.latex())
@@ -348,13 +367,14 @@ class TestMiscellaneous(unittest.TestCase):
     def test_scikit_learn_compatibility(self):
         """Test PySRRegressor compatibility with scikit-learn."""
         model = PySRRegressor(
-            max_evals=10000,
+            max_evals=1000,
             verbosity=0,
             progress=False,
            random_state=0,
             deterministic=True,
             procs=0,
             multithreading=False,
+            warm_start=False,
         )  # Return early.
 
         check_generator = check_estimator(model, generate_only=True)
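
For context on the pattern this commit threads through the tests: early_stop_condition takes a small Julia predicate over (loss, complexity), and the search returns as soon as some expression on the Pareto front satisfies it, which is what lets these tests finish quickly. Below is a minimal standalone sketch of that usage, not part of the diff; the random data and the niterations value are illustrative only.

import numpy as np
from pysr import PySRRegressor

X = np.random.randn(100, 5)
y = X[:, 0]  # target is exactly the first feature, so a complexity-1 solution exists

model = PySRRegressor(
    niterations=40,  # illustrative upper bound; the search should stop well before this
    binary_operators=["plus"],
    unary_operators=[],
    # Julia predicate: halt once any equation of complexity 1 reaches loss < 1e-4
    early_stop_condition="stop_if(loss, complexity) = loss < 1e-4 && complexity == 1",
)
model.fit(X, y)
print(model.get_best())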