Spaces:
Sleeping
Sleeping
MilesCranmer
committed on
Merge pull request #191 from MilesCranmer/backend-update
Browse files
Update backend: `x^y` instead of `abs(x)^y`, and high-precision constants
- pysr/sr.py +7 -2
- pysr/version.py +2 -2
- test/test.py +22 -0
pysr/sr.py
CHANGED
@@ -41,13 +41,14 @@ already_ran = False
|
|
41 |
sympy_mappings = {
|
42 |
"div": lambda x, y: x / y,
|
43 |
"mult": lambda x, y: x * y,
|
|
|
44 |
"sqrt_abs": lambda x: sympy.sqrt(abs(x)),
|
45 |
"square": lambda x: x**2,
|
46 |
"cube": lambda x: x**3,
|
47 |
"plus": lambda x, y: x + y,
|
48 |
"sub": lambda x, y: x - y,
|
49 |
"neg": lambda x: -x,
|
50 |
-
"pow": lambda x, y: abs(x) ** y,
|
51 |
"pow_abs": lambda x, y: abs(x) ** y,
|
52 |
"cos": sympy.cos,
|
53 |
"sin": sympy.sin,
|
@@ -59,7 +60,7 @@ sympy_mappings = {
|
|
59 |
"acos": sympy.acos,
|
60 |
"asin": sympy.asin,
|
61 |
"atan": sympy.atan,
|
62 |
-
"acosh": lambda x: sympy.acosh(abs(x) + 1),
|
63 |
"acosh_abs": lambda x: sympy.acosh(abs(x) + 1),
|
64 |
"asinh": sympy.asinh,
|
65 |
"atanh": lambda x: sympy.atanh(sympy.Mod(x + 1, 2) - 1),
|
@@ -68,6 +69,10 @@ sympy_mappings = {
|
|
68 |
"mod": sympy.Mod,
|
69 |
"erf": sympy.erf,
|
70 |
"erfc": sympy.erfc,
|
|
|
|
|
|
|
|
|
71 |
"log_abs": lambda x: sympy.log(abs(x)),
|
72 |
"log10_abs": lambda x: sympy.log(abs(x), 10),
|
73 |
"log2_abs": lambda x: sympy.log(abs(x), 2),
|
|
|
41 |
sympy_mappings = {
|
42 |
"div": lambda x, y: x / y,
|
43 |
"mult": lambda x, y: x * y,
|
44 |
+
"sqrt": lambda x: sympy.sqrt(x),
|
45 |
"sqrt_abs": lambda x: sympy.sqrt(abs(x)),
|
46 |
"square": lambda x: x**2,
|
47 |
"cube": lambda x: x**3,
|
48 |
"plus": lambda x, y: x + y,
|
49 |
"sub": lambda x, y: x - y,
|
50 |
"neg": lambda x: -x,
|
51 |
+
"pow": lambda x, y: x**y,
|
52 |
"pow_abs": lambda x, y: abs(x) ** y,
|
53 |
"cos": sympy.cos,
|
54 |
"sin": sympy.sin,
|
|
|
60 |
"acos": sympy.acos,
|
61 |
"asin": sympy.asin,
|
62 |
"atan": sympy.atan,
|
63 |
+
"acosh": lambda x: sympy.acosh(x),
|
64 |
"acosh_abs": lambda x: sympy.acosh(abs(x) + 1),
|
65 |
"asinh": sympy.asinh,
|
66 |
"atanh": lambda x: sympy.atanh(sympy.Mod(x + 1, 2) - 1),
|
|
|
69 |
"mod": sympy.Mod,
|
70 |
"erf": sympy.erf,
|
71 |
"erfc": sympy.erfc,
|
72 |
+
"log": lambda x: sympy.log(x),
|
73 |
+
"log10": lambda x: sympy.log(x, 10),
|
74 |
+
"log2": lambda x: sympy.log(x, 2),
|
75 |
+
"log1p": lambda x: sympy.log(x + 1),
|
76 |
"log_abs": lambda x: sympy.log(abs(x)),
|
77 |
"log10_abs": lambda x: sympy.log(abs(x), 10),
|
78 |
"log2_abs": lambda x: sympy.log(abs(x), 2),
|
pysr/version.py
CHANGED
@@ -1,2 +1,2 @@
|
|
1 |
-
__version__ = "0.
|
2 |
-
__symbolic_regression_jl_version__ = "0.
|
|
|
1 |
+
__version__ = "0.11.0"
|
2 |
+
__symbolic_regression_jl_version__ = "0.12.0"
|
test/test.py
CHANGED
@@ -82,6 +82,22 @@ class TestPipeline(unittest.TestCase):
|
|
82 |
print(model.equations_)
|
83 |
self.assertLessEqual(model.equations_.iloc[-1]["loss"], 1e-4)
|
84 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
85 |
def test_multioutput_custom_operator_quiet_custom_complexity(self):
|
86 |
y = self.X[:, [0, 1]] ** 2
|
87 |
model = PySRRegressor(
|
@@ -182,6 +198,12 @@ class TestPipeline(unittest.TestCase):
|
|
182 |
warm_start=True,
|
183 |
early_stop_condition=None,
|
184 |
)
|
|
|
|
|
|
|
|
|
|
|
|
|
185 |
# This should exit almost immediately, and use the old equations
|
186 |
regressor.fit(X, y)
|
187 |
|
|
|
82 |
print(model.equations_)
|
83 |
self.assertLessEqual(model.equations_.iloc[-1]["loss"], 1e-4)
|
84 |
|
85 |
+
def test_high_precision_search(self):
|
86 |
+
y = 1.23456789 * self.X[:, 0]
|
87 |
+
model = PySRRegressor(
|
88 |
+
**self.default_test_kwargs,
|
89 |
+
early_stop_condition="stop_if(loss, complexity) = loss < 1e-4 && complexity == 3",
|
90 |
+
precision=64,
|
91 |
+
parsimony=0.01,
|
92 |
+
warm_start=True,
|
93 |
+
)
|
94 |
+
model.fit(self.X, y)
|
95 |
+
from pysr.sr import Main
|
96 |
+
|
97 |
+
# We should have that the model state is now a Float64 hof:
|
98 |
+
Main.test_state = model.raw_julia_state_
|
99 |
+
self.assertTrue(Main.eval("typeof(test_state[2]).parameters[1] == Float64"))
|
100 |
+
|
101 |
def test_multioutput_custom_operator_quiet_custom_complexity(self):
|
102 |
y = self.X[:, [0, 1]] ** 2
|
103 |
model = PySRRegressor(
|
|
|
198 |
warm_start=True,
|
199 |
early_stop_condition=None,
|
200 |
)
|
201 |
+
# Check that the julia state is saved:
|
202 |
+
from pysr.sr import Main
|
203 |
+
|
204 |
+
# We should have that the model state is now a Float32 hof:
|
205 |
+
Main.test_state = regressor.raw_julia_state_
|
206 |
+
self.assertTrue(Main.eval("typeof(test_state[2]).parameters[1] == Float32"))
|
207 |
# This should exit almost immediately, and use the old equations
|
208 |
regressor.fit(X, y)
|
209 |
|