tttc3 committed
Commit: 4173a8b • 1 parent: 9490776
additional fixes
Files changed: pysr/sr.py (+16 -8)
pysr/sr.py
CHANGED
@@ -779,7 +779,8 @@ class PySRRegressor(BaseEstimator, RegressorMixin, MultiOutputMixin):
         )
 
     def __repr__(self):
-        """
+        """
+        Prints all current equations fitted by the model.
 
         The string `>>>>` denotes which equation is selected by the
         `model_selection`.
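For context, a minimal usage sketch of the behaviour this docstring describes (not part of the commit; the dataset and parameter choices are illustrative):

    import numpy as np
    from pysr import PySRRegressor

    X = np.random.randn(100, 2)
    y = X[:, 0] ** 2 - 1.5 * X[:, 1]

    # model_selection controls which equation __repr__ marks with ">>>>"
    model = PySRRegressor(model_selection="best", niterations=5, binary_operators=["+", "*"])
    model.fit(X, y)

    # Printing the model lists all fitted equations; the ">>>>" row is the
    # one chosen by model_selection.
    print(model)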
@@ -1512,7 +1513,8 @@ class PySRRegressor(BaseEstimator, RegressorMixin, MultiOutputMixin):
             ) from error
 
     def predict(self, X, index=None):
-        """
+        """
+        Predict y from input X using the equation chosen by `model_selection`.
 
         You may see what equation is used by printing this object. X should
         have the same columns as the training data.
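A short continuation, assuming the fitted `model` from the sketch above: predict evaluates the selected equation on new data, and `index` picks a specific row of the equation table instead.

    X_new = np.random.randn(10, 2)           # same columns as the training data
    y_pred = model.predict(X_new)            # uses the equation chosen by model_selection
    y_first = model.predict(X_new, index=0)  # illustrative: pick an equation by index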
@@ -1537,7 +1539,8 @@ class PySRRegressor(BaseEstimator, RegressorMixin, MultiOutputMixin):
         return self._decision_function(X, best_equation)
 
     def sympy(self, index=None):
-        """
+        """
+        Return sympy representation of the equation(s) chosen by `model_selection`.
 
         Parameters
         ----------
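Continuing with the same fitted `model` (an assumption), the returned object is a SymPy expression and can be manipulated as such:

    expr = model.sympy()      # expression selected by model_selection
    print(expr)
    print(expr.simplify())    # ordinary SymPy operations apply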
@@ -1558,7 +1561,8 @@ class PySRRegressor(BaseEstimator, RegressorMixin, MultiOutputMixin):
         return best_equation["sympy_format"]
 
     def latex(self, index=None):
-        """
+        """
+        Return latex representation of the equation(s) chosen by `model_selection`.
 
         Parameters
         ----------
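Again assuming the fitted `model` from the first sketch, the LaTeX string can be dropped straight into a document:

    print(model.latex())         # LaTeX for the equation chosen by model_selection
    print(model.latex(index=1))  # illustrative: LaTeX for a specific equation index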
@@ -1579,7 +1583,8 @@ class PySRRegressor(BaseEstimator, RegressorMixin, MultiOutputMixin):
         return sympy.latex(sympy_representation)
 
     def jax(self, index=None):
-        """
+        """
+        Return jax representation of the equation(s) chosen by `model_selection`.
 
         Each equation (multiple given if there are multiple outputs) is a dictionary
         containing {"callable": func, "parameters": params}. To call `func`, pass
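A hedged sketch of using the returned dictionary, assuming the fitted `model` and `X_new` from the earlier sketches; the calling convention follows the docstring's {"callable": func, "parameters": params} description:

    jax_model = model.jax()
    func = jax_model["callable"]
    params = jax_model["parameters"]
    y_jax = func(X_new, params)   # evaluate the selected equation with its fitted constants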
@@ -1606,7 +1611,8 @@ class PySRRegressor(BaseEstimator, RegressorMixin, MultiOutputMixin):
         return best_equation["jax_format"]
 
     def pytorch(self, index=None):
-        """
+        """
+        Return pytorch representation of the equation(s) chosen by `model_selection`.
 
         Each equation (multiple given if there are multiple outputs) is a PyTorch module
         containing the parameters as trainable attributes. You can use the module like
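A similar sketch for the PyTorch export, assuming the fitted `model` and `X_new` from the earlier sketches; the returned module is called like any other torch module:

    import torch

    torch_module = model.pytorch()
    y_torch = torch_module(torch.tensor(X_new, dtype=torch.float32))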
@@ -1794,9 +1800,11 @@ def _handle_feature_selection(X, select_k_features, y, variable_names):
 
 
 def run_feature_selection(X, y, select_k_features):
-    """
+    """
+    Use a gradient boosting tree regressor as a proxy for finding
     the k most important features in X, returning indices for those
-    features as output.
+    features as output.
+    """
     from sklearn.ensemble import RandomForestRegressor
     from sklearn.feature_selection import SelectFromModel
 
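For reference, a hedged sketch of the selection pattern these imports suggest (an illustration, not the function body from this commit; the helper name is hypothetical):

    import numpy as np
    from sklearn.ensemble import RandomForestRegressor
    from sklearn.feature_selection import SelectFromModel

    def select_top_k_features(X, y, k):
        # Fit a forest as an importance proxy, then keep the k highest-importance columns.
        clf = RandomForestRegressor(n_estimators=100, random_state=0)
        clf.fit(X, y)
        selector = SelectFromModel(clf, threshold=-np.inf, max_features=k, prefit=True)
        return selector.get_support(indices=True)   # indices of the selected features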