MilesCranmer committed
Merge pull request #425 from MilesCranmer/pre-commit

This PR applies pre-commit hooks across the repository: trailing-whitespace and end-of-file fixes, YAML and large-file checks, black formatting (including notebooks), nbstripout for notebook outputs, pycln for unused imports, and isort for import ordering. Most hunks below are therefore whitespace-only (a `-`/`+` pair that looks identical means trailing whitespace or a missing final newline was fixed) or re-sorted import blocks.

This view is limited to 50 files because it contains too many changes. See raw diff.
- .github/ISSUE_TEMPLATE/feature_request.yml +0 -1
- .github/workflows/CI.yml +3 -3
- .github/workflows/CI_Windows.yml +1 -1
- .github/workflows/CI_conda_forge.yml +1 -1
- .github/workflows/CI_docker_large_nightly.yml +2 -2
- .github/workflows/CI_large_nightly.yml +1 -1
- .github/workflows/CI_mac.yml +1 -1
- .github/workflows/codeql-analysis.yml +3 -3
- .github/workflows/docker_deploy.yml +1 -1
- .github/workflows/docs.yml +2 -2
- .github/workflows/update_backend.yml +1 -1
- .pre-commit-config.yaml +31 -0
- CONTRIBUTORS.md +1 -1
- README.md +2 -2
- benchmarks/README.md +1 -1
- benchmarks/hyperparamopt.py +6 -4
- benchmarks/print_best_model.py +4 -6
- benchmarks/space.py +1 -1
- datasets/FeynmanEquations.csv +1 -1
- docs/.gitignore +1 -1
- docs/_api.md +1 -3
- docs/assets/pysr_logo.svg +1 -1
- docs/assets/pysr_logo_reduced.svg +1 -1
- docs/backend.md +2 -2
- docs/gen_param_docs.py +6 -5
- docs/generate_papers.py +3 -2
- docs/operators.md +0 -2
- docs/options.md +1 -1
- docs/papers.yml +0 -1
- docs/requirements.txt +1 -1
- docs/stylesheets/extra.css +1 -1
- docs/stylesheets/papers_header.txt +0 -1
- docs/tuning.md +2 -2
- environment.yml +1 -1
- examples/pysr_demo.ipynb +0 -0
- mkdocs.yml +1 -1
- pyproject.toml +2 -0
- pysr/__init__.py +20 -11
- pysr/_cli/main.py +1 -0
- pysr/export_jax.py +0 -3
- pysr/export_latex.py +3 -3
- pysr/export_numpy.py +2 -1
- pysr/export_torch.py +1 -0
- pysr/feynman_problems.py +6 -3
- pysr/julia_helpers.py +4 -3
- pysr/sr.py +21 -24
- pysr/test/__init__.py +3 -1
- pysr/test/test.py +14 -14
- pysr/test/test_cli.py +2 -0
- pysr/test/test_env.py +1 -1
.github/ISSUE_TEMPLATE/feature_request.yml
CHANGED
@@ -19,4 +19,3 @@ body:
   attributes:
     value: |
       Be sure to check out the [PySR forums](https://github.com/MilesCranmer/PySR/discussions) to chat with other users about PySR use-cases!
-
.github/workflows/CI.yml
CHANGED
@@ -32,7 +32,7 @@ jobs:
         julia-version: ['1.9']
         python-version: ['3.10']
         os: [ubuntu-latest]
-
+
     steps:
       - uses: actions/checkout@v3
      - name: "Set up Julia"
@@ -96,7 +96,7 @@ jobs:
       matrix:
         python-version: ['3.9']
         os: ['ubuntu-latest']
-
+
     steps:
       - uses: actions/checkout@v3
       - name: "Cache conda"
@@ -129,7 +129,7 @@ jobs:

   coveralls:
     name: Indicate completion to coveralls.io
-    needs:
+    needs:
     - test
     runs-on: ubuntu-latest
     defaults:
.github/workflows/CI_Windows.yml
CHANGED
@@ -32,7 +32,7 @@ jobs:
         julia-version: ['1.9']
         python-version: ['3.10']
         os: [windows-latest]
-
+
     steps:
       - uses: actions/checkout@v3
      - name: "Set up Julia"
.github/workflows/CI_conda_forge.yml
CHANGED
@@ -23,7 +23,7 @@ jobs:
         python-version: ['3.8', '3.9', '3.10', '3.11']
         os: ['ubuntu-latest', 'macos-latest']
         use-mamba: [true, false]
-
+
     steps:
       - name: "Set up Conda"
        uses: conda-incubator/setup-miniconda@v2
.github/workflows/CI_docker_large_nightly.yml
CHANGED
@@ -22,8 +22,8 @@ jobs:
         python-version: ['3.10']
         os: [ubuntu-latest]
         arch: ['linux/amd64', 'linux/arm64']
-
-
+
+
     steps:
       - uses: actions/checkout@v3
       - name: Set up QEMU
.github/workflows/CI_large_nightly.yml
CHANGED
@@ -26,7 +26,7 @@ jobs:
         julia-version: ['1.6', '1.8', '1.9']
         python-version: ['3.7', '3.8', '3.9', '3.10', '3.11']
         os: [ubuntu-latest, macos-latest, windows-latest]
-
+
     steps:
       - uses: actions/checkout@v3
      - name: "Set up Julia"
.github/workflows/CI_mac.yml
CHANGED
@@ -32,7 +32,7 @@ jobs:
         julia-version: ['1.9']
         python-version: ['3.10']
         os: [macos-latest]
-
+
     steps:
       - uses: actions/checkout@v3
      - name: "Set up Julia"
.github/workflows/codeql-analysis.yml
CHANGED
@@ -37,11 +37,11 @@ jobs:
     # If you wish to specify custom queries, you can do so here or in a config file.
     # By default, queries listed here will override any specified in a config file.
     # Prefix the list here with "+" to use these queries and those in the config file.
-
+
     # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
     # queries: security-extended,security-and-quality

-
+
     # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
     # If this step fails, then you should remove it and run the build manually (see below)
     - name: Autobuild
@@ -50,7 +50,7 @@ jobs:
     # ℹ️ Command-line programs to run using the OS shell.
     # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun

-    # If the Autobuild fails above, remove it and uncomment the following three lines.
+    # If the Autobuild fails above, remove it and uncomment the following three lines.
     #   modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.

     # - run: |
.github/workflows/docker_deploy.yml
CHANGED
@@ -9,7 +9,7 @@ on:
     tags:
       - "v*.*.*"
   workflow_dispatch:
-
+

 jobs:
   docker:
.github/workflows/docs.yml
CHANGED
@@ -18,7 +18,7 @@ jobs:
     defaults:
       run:
         shell: bash
-
+
     steps:
       - uses: actions/checkout@v3
       - name: "Set up Python"
@@ -33,4 +33,4 @@ jobs:
       - name: "Build API docs"
         run: cd docs && ./gen_docs.sh
       - name: "Deploy documentation"
-        run: mkdocs gh-deploy --force
+        run: mkdocs gh-deploy --force
.github/workflows/update_backend.yml
CHANGED
@@ -48,7 +48,7 @@ jobs:
           CURRENT_PYSR_PATCH_VERSION=$(python -c 'import pysr; print(pysr.version.__version__.split(".")[-1], end="")' 2>/dev/null)
           NEW_PYSR_PATCH_VERSION=$((CURRENT_PYSR_PATCH_VERSION + 1))
           sed -i "s/^__version__ = .*/__version__ = \"$(python -c 'import pysr; print(".".join(pysr.version.__version__.split(".")[:-1]), end="")' 2>/dev/null).${NEW_PYSR_PATCH_VERSION}\"/" pysr/version.py
-
+
           # Set SymbolicRegression.jl version:
           sed -i "s/^__symbolic_regression_jl_version__ = .*/__symbolic_regression_jl_version__ = \"${{ steps.get-latest.outputs.version }}\"/" pysr/version.py

.pre-commit-config.yaml
ADDED
@@ -0,0 +1,31 @@
+repos:
+  # General linting
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v3.2.0
+    hooks:
+      - id: trailing-whitespace
+      - id: end-of-file-fixer
+      - id: check-yaml
+      - id: check-added-large-files
+  # General formatting
+  - repo: https://github.com/psf/black
+    rev: 23.3.0
+    hooks:
+      - id: black
+      - id: black-jupyter
+  # Stripping notebooks
+  - repo: https://github.com/kynan/nbstripout
+    rev: 0.6.1
+    hooks:
+      - id: nbstripout
+  # Unused imports
+  - repo: https://github.com/hadialqattan/pycln
+    rev: "v2.2.2"
+    hooks:
+      - id: pycln
+  # Sorted imports
+  - repo: https://github.com/PyCQA/isort
+    rev: "5.12.0"
+    hooks:
+      - id: isort
+        additional_dependencies: [toml]
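For contributors, the typical way to activate these hooks locally is with the pre-commit CLI (assumed usage; these commands are not part of this diff):

    pip install pre-commit
    pre-commit install          # registers the git hook so checks run on each commit
    pre-commit run --all-files  # applies every configured hook (black, isort, pycln, nbstripout, ...) across the repo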
CONTRIBUTORS.md
CHANGED
@@ -121,4 +121,4 @@ Thanks for being part of the PySR community!
 <!-- prettier-ignore-end -->

 <!-- ALL-CONTRIBUTORS-LIST:END -->
-</div>
+</div>
README.md
CHANGED
@@ -155,7 +155,7 @@ The PySR build in conda includes all required dependencies, so you can install i
 conda install -c conda-forge pysr
 ```

-from within your target conda environment.
+from within your target conda environment.

 However, note that the conda install does not support precompilation of Julia libraries, so the
 start time may be slightly slower as the JIT-compilation will be running.
@@ -305,7 +305,7 @@ model = PySRRegressor(
     # ^ 2 populations per core, so one is always running.
     population_size=50,
     # ^ Slightly larger populations, for greater diversity.
-    ncyclesperiteration=500,
+    ncyclesperiteration=500,
     # ^ Generations between migrations.
     niterations=10000000,  # Run forever
     early_stop_condition=(
benchmarks/README.md
CHANGED
@@ -21,7 +21,7 @@ v0.3.6 | 25900
 v0.3.7 | 26600
 v0.3.8 | 7470
 v0.3.9 | 6760
-v0.3.10 |
+v0.3.10 |
 v0.3.11 | 19500
 v0.3.12 | 19000
 v0.3.13 | 15200
benchmarks/hyperparamopt.py
CHANGED
@@ -1,13 +1,15 @@
 """Start a hyperoptimization from a single node"""
-import sys
-import numpy as np
 import pickle as pkl
+import sys

 import hyperopt
-
+import numpy as np
+from hyperopt import Trials, fmin, hp, tpe
 from hyperopt.fmin import generate_trials_to_calculate
 from space import *

+from pysr import PySRRegressor
+
 # Change the following code to your file
 ################################################################################
 TRIALS_FOLDER = "trials2"
benchmarks/print_best_model.py
CHANGED
@@ -1,12 +1,11 @@
 """Print the best model parameters and loss"""
-import sys
-import numpy as np
 import pickle as pkl
-import hyperopt
-from hyperopt import hp, fmin, tpe, Trials
-from space import space
 from pprint import PrettyPrinter

+import hyperopt
+import numpy as np
+from hyperopt import Trials, fmin, hp, tpe
+from space import space

 # Change the following code to your file
 ################################################################################
@@ -51,7 +50,6 @@ import glob
 path = TRIALS_FOLDER + "/*.pkl"
 files = 0
 for fname in glob.glob(path):
-
     trials_obj = pkl.load(open(fname, "rb"))
     n_trials = trials_obj["n"]
     trials_obj = trials_obj["trials"]
benchmarks/space.py
CHANGED
@@ -1,5 +1,5 @@
 import numpy as np
-from hyperopt import hp, fmin, tpe, Trials
+from hyperopt import Trials, fmin, hp, tpe

 binary_operators = ["*", "/", "+", "-"]
 unary_operators = ["sin", "cos", "exp", "log"]
datasets/FeynmanEquations.csv
CHANGED
@@ -98,4 +98,4 @@ III.15.14,10,96,m,(h/(2*pi))**2/(2*E_n*d**2),3,h,1,5,E_n,1,5,d,1,5,,,,,,,,,,,,,,
 III.15.27,10,97,k,2*pi*alpha/(n*d),3,alpha,1,5,n,1,5,d,1,5,,,,,,,,,,,,,,,,,,,,,
 III.17.37,10,98,f,beta*(1+alpha*cos(theta)),3,beta,1,5,alpha,1,5,theta,1,5,,,,,,,,,,,,,,,,,,,,,
 III.19.51,10,99,E_n,-m*q**4/(2*(4*pi*epsilon)**2*(h/(2*pi))**2)*(1/n**2),5,m,1,5,q,1,5,h,1,5,n,1,5,epsilon,1,5,,,,,,,,,,,,,,,
-III.21.20,10,100,j,-rho_c_0*q*A_vec/m,4,rho_c_0,1,5,q,1,5,A_vec,1,5,m,1,5,,,,,,,,,,,,,,,,,,
+III.21.20,10,100,j,-rho_c_0*q*A_vec/m,4,rho_c_0,1,5,q,1,5,A_vec,1,5,m,1,5,,,,,,,,,,,,,,,,,,
docs/.gitignore
CHANGED
@@ -1,4 +1,4 @@
 build
 api.md
 index.md.bak
-papers.md
+papers.md
docs/_api.md
CHANGED
@@ -6,7 +6,7 @@ Let's look at them below.
 PARAMSKEY

 ## PySRRegressor Functions
-
+
 ::: pysr.PySRRegressor.fit
     options:
         show_root_heading: true
@@ -60,5 +60,3 @@ PARAMSKEY
         show_root_heading: true
         heading_level: 3
         show_root_full_path: false
-
-
docs/assets/pysr_logo.svg
CHANGED
docs/assets/pysr_logo_reduced.svg
CHANGED
docs/backend.md
CHANGED
@@ -12,7 +12,7 @@ Generally you can do this as follows:
 git clone https://github.com/MilesCranmer/SymbolicRegression.jl
 ```
 2. Edit the source code in `src/` to your requirements:
-   - The documentation for the backend is given [here](https://astroautomata.com/SymbolicRegression.jl/dev/).
+   - The documentation for the backend is given [here](https://astroautomata.com/SymbolicRegression.jl/dev/).
   - Throughout the package, you will often see template functions which typically use a symbol `T` (such as in the string `where {T<:Real}`). Here, `T` is simply the datatype of the input data and stored constants, such as `Float32` or `Float64`. Writing functions in this way lets us write functions generic to types, while still having access to the specific type specified at compilation time.
   - Expressions are stored as binary trees, using the `Node{T}` type, described [here](https://astroautomata.com/SymbolicRegression.jl/dev/types/#SymbolicRegression.CoreModule.EquationModule.Node).
   - Parts of the code which are typically edited by users include:
@@ -26,4 +26,4 @@ git clone https://github.com/MilesCranmer/SymbolicRegression.jl

 If you get comfortable enough with the backend, you might consider using the Julia package directly: the API is given on the [SymbolicRegression.jl documentation](https://astroautomata.com/SymbolicRegression.jl/dev/).

-If you make a change that you think could be useful to other users, don't hesitate to open a pull request on either the PySR or SymbolicRegression.jl repositories! Contributions are very appreciated.
+If you make a change that you think could be useful to other users, don't hesitate to open a pull request on either the PySR or SymbolicRegression.jl repositories! Contributions are very appreciated.
docs/gen_param_docs.py
CHANGED
@@ -1,13 +1,14 @@
 # Load YAML file param_groupings.yml:
-
-from yaml import safe_load
+import re
 import sys

+from docstring_parser import parse
+from yaml import safe_load
+
 sys.path.append("..")
+
+
 from pysr import PySRRegressor
-import pysr
-import re
-from docstring_parser import parse

 found_params = []

docs/generate_papers.py
CHANGED
@@ -1,7 +1,8 @@
 """This script generates the papers.md file from the papers.yml file."""
-import yaml
 from pathlib import Path

+import yaml
+
 data_file = "papers.yml"
 papers_header = Path("stylesheets") / "papers_header.txt"
 output_file = "papers.md"
@@ -49,7 +50,7 @@ with open(output_file, "w") as f:

 <center>
 {authors}
-
+
 <small>{affiliations}</small>
 </center>

docs/operators.md
CHANGED
@@ -64,5 +64,3 @@ instead of `1.5e3`, if you write any constant numbers.
 Your operator should work with the entire real line (you can use
 abs(x) for operators requiring positive input - see `log_abs`); otherwise
 the search code will experience domain errors.
-
-
docs/options.md
CHANGED
@@ -265,7 +265,7 @@ PySRRegressor(..., loss="loss(x, y) = abs(x * y)")
|
|
265 |
With weights:
|
266 |
|
267 |
```python
|
268 |
-
model = PySRRegressor(..., loss="myloss(x, y, w) = w * abs(x - y)")
|
269 |
model.fit(..., weights=weights)
|
270 |
```
|
271 |
|
|
|
265 |
With weights:
|
266 |
|
267 |
```python
|
268 |
+
model = PySRRegressor(..., loss="myloss(x, y, w) = w * abs(x - y)")
|
269 |
model.fit(..., weights=weights)
|
270 |
```
|
271 |
|
docs/papers.yml
CHANGED
@@ -151,7 +151,6 @@ papers:
     abstract: "We present an approach for using machine learning to automatically discover the governing equations and hidden properties of real physical systems from observations. We train a \"graph neural network\" to simulate the dynamics of our solar system's Sun, planets, and large moons from 30 years of trajectory data. We then use symbolic regression to discover an analytical expression for the force law implicitly learned by the neural network, which our results showed is equivalent to Newton's law of gravitation. The key assumptions that were required were translational and rotational equivariance, and Newton's second and third laws of motion. Our approach correctly discovered the form of the symbolic force law. Furthermore, our approach did not require any assumptions about the masses of planets and moons or physical constants. They, too, were accurately inferred through our methods. Though, of course, the classical law of gravitation has been known since Isaac Newton, our result serves as a validation that our method can discover unknown laws and hidden properties from observed data. More broadly this work represents a key step toward realizing the potential of machine learning for accelerating scientific discovery."
     image: rediscovering_gravity.png
     date: 2022-02-04
-    link: https://arxiv.org/abs/2202.02306
   - title: (Thesis) On Neural Differential Equations - Section 6.1
     authors:
       - Patrick Kidger (1)
docs/requirements.txt
CHANGED
@@ -1,4 +1,4 @@
 mkdocs-material
 mkdocs-autorefs
 mkdocstrings[python]
-docstring_parser
+docstring_parser
docs/stylesheets/extra.css
CHANGED
@@ -2,4 +2,4 @@
   --md-primary-fg-color: #C13245;
   --md-primary-fg-color--light: #D35364;
   --md-primary-fg-color--dark: #982736;
-}
+}
docs/stylesheets/papers_header.txt
CHANGED
@@ -6,4 +6,3 @@ These are sorted by the date of release, with most recent papers at the top.

 If you have used PySR in your research,
 please submit a pull request to add your paper to [this file](https://github.com/MilesCranmer/PySR/blob/master/docs/papers.yml).
-
docs/tuning.md
CHANGED
@@ -17,7 +17,7 @@ I run from IPython (Jupyter Notebooks don't work as well[^1]) on the head node o
 5. Set `ncyclesperiteration` to maybe `5000` or so, until the head node occupation is under `10%`.
 6. Set `constraints` and `nested_constraints` as strict as possible. These can help quite a bit with exploration. Typically, if I am using `pow`, I would set `constraints={"pow": (9, 1)}`, so that power laws can only have a variable or constant as their exponent. If I am using `sin` and `cos`, I also like to set `nested_constraints={"sin": {"sin": 0, "cos": 0}, "cos": {"sin": 0, "cos": 0}}`, so that sin and cos can't be nested, which seems to happen frequently. (Although in practice I would just use `sin`, since the search could always add a phase offset!)
 7. Set `maxsize` a bit larger than the final size you want. e.g., if you want a final equation of size `30`, you might set this to `35`, so that it has a bit of room to explore.
-8. Set `maxdepth` strictly, but leave a bit of room for exploration. e.g., if you want a final equation limited to a depth of `5`, you might set this to `6` or `7`, so that it has a bit of room to explore.
+8. Set `maxdepth` strictly, but leave a bit of room for exploration. e.g., if you want a final equation limited to a depth of `5`, you might set this to `6` or `7`, so that it has a bit of room to explore.
 9. Set `parsimony` equal to about the minimum loss you would expect, divided by 5-10. e.g., if you expect the final equation to have a loss of `0.001`, you might set `parsimony=0.0001`.
 10. Set `weight_optimize` to some larger value, maybe `0.001`. This is very important if `ncyclesperiteration` is large, so that optimization happens more frequently.
 11. Set `turbo` to `True`. This may or not work, if there's an error just turn it off (some operators are not SIMD-capable). If it does work, it should give you a nice 20% speedup.
@@ -31,7 +31,7 @@ Some things I try out to see if they help:
 2. Try setting `adaptive_parsimony_scaling` a bit larger, maybe up to `1000`.
 3. Sometimes I try using `warmup_maxsize_by`. This is useful if you find that the search finds a very complex equation very quickly, and then gets stuck. It basically forces it to start at the simpler equations and build up complexity slowly.
 4. Play around with different losses:
-   - I typically try `L2DistLoss()` and `L1DistLoss()`. L1 loss is more robust to outliers compared to L2 (L1 finds the median, while L2 finds the mean of a random variable), so is often a good choice for a noisy dataset.
+   - I typically try `L2DistLoss()` and `L1DistLoss()`. L1 loss is more robust to outliers compared to L2 (L1 finds the median, while L2 finds the mean of a random variable), so is often a good choice for a noisy dataset.
    - I might also provide the `weights` parameter to `fit` if there is some reasonable choice of weighting. For example, maybe I know the signal-to-noise of a particular row of `y` - I would set that SNR equal to the weights. Or, perhaps I do some sort of importance sampling, and weight the rows by importance.

 Very rarely I might also try tuning the mutation weights, the crossover probability, or the optimization parameters. I never use `denoise` or `select_k_features` as I find they aren't very useful.
environment.yml
CHANGED
@@ -10,4 +10,4 @@ dependencies:
   - pyjulia
   - openlibm
   - openspecfun
-  - click
+  - click
examples/pysr_demo.ipynb
CHANGED
The diff for this file is too large to render. See raw diff.
mkdocs.yml
CHANGED
@@ -13,7 +13,7 @@ theme:
       toggle:
         icon: material/toggle-switch-off-outline
         name: Switch to light mode
-
+

   features:
     - navigation.expand
pyproject.toml
ADDED
@@ -0,0 +1,2 @@
+[tool.isort]
+profile = "black"
pysr/__init__.py
CHANGED
@@ -1,14 +1,23 @@
 from . import sklearn_monkeypatch
-from .version import __version__
-from .sr import (
-    pysr,
-    PySRRegressor,
-    best,
-    best_tex,
-    best_callable,
-    best_row,
-)
-from .julia_helpers import install
-from .feynman_problems import Problem, FeynmanProblem
 from .export_jax import sympy2jax
 from .export_torch import sympy2torch
+from .feynman_problems import FeynmanProblem, Problem
+from .julia_helpers import install
+from .sr import PySRRegressor, best, best_callable, best_row, best_tex, pysr
+from .version import __version__
+
+__all__ = [
+    "sklearn_monkeypatch",
+    "sympy2jax",
+    "sympy2torch",
+    "FeynmanProblem",
+    "Problem",
+    "install",
+    "PySRRegressor",
+    "best",
+    "best_callable",
+    "best_row",
+    "best_tex",
+    "pysr",
+    "__version__",
+]
pysr/_cli/main.py
CHANGED
@@ -1,4 +1,5 @@
 import click
+
 from ..julia_helpers import install


pysr/export_jax.py
CHANGED
@@ -1,7 +1,4 @@
-import functools as ft
 import sympy
-import string
-import random

 # Special since need to reduce arguments.
 MUL = 0
pysr/export_latex.py
CHANGED
@@ -1,9 +1,9 @@
 """Functions to help export PySR equations to LaTeX."""
+from typing import List
+
+import pandas as pd
 import sympy
 from sympy.printing.latex import LatexPrinter
-import pandas as pd
-from typing import List
-import warnings


 class PreciseLatexPrinter(LatexPrinter):
pysr/export_numpy.py
CHANGED
@@ -1,8 +1,9 @@
 """Code for exporting discovered expressions to numpy"""
+import warnings
+
 import numpy as np
 import pandas as pd
 from sympy import lambdify
-import warnings


 class CallableEquation:
pysr/export_torch.py
CHANGED
@@ -5,6 +5,7 @@

 import collections as co
 import functools as ft
+
 import sympy


pysr/feynman_problems.py
CHANGED
@@ -1,8 +1,10 @@
-import numpy as np
 import csv
-from .sr import pysr, best
-from pathlib import Path
 from functools import partial
+from pathlib import Path
+
+import numpy as np
+
+from .sr import best, pysr

 PKG_DIR = Path(__file__).parents[1]
 FEYNMAN_DATASET = PKG_DIR / "datasets" / "FeynmanEquations.csv"
@@ -118,6 +120,7 @@ def do_feynman_experiments_parallel(
     data_dir=FEYNMAN_DATASET,
 ):
     import multiprocessing as mp
+
     from tqdm import tqdm

     problems = mk_problems(first=first, gen=True, dp=dp, data_dir=data_dir)
pysr/julia_helpers.py
CHANGED
@@ -1,12 +1,13 @@
 """Functions for initializing the Julia environment and installing deps."""
-import os, sys
+import os
 import subprocess
+import sys
 import warnings
 from pathlib import Path
-
+
 from julia.api import JuliaError

-from .version import __version__, __symbolic_regression_jl_version__
+from .version import __symbolic_regression_jl_version__, __version__

 juliainfo = None
 julia_initialized = False
pysr/sr.py
CHANGED
@@ -1,40 +1,37 @@
 """Define the PySRRegressor scikit-learn interface."""
 import copy
-from io import StringIO
 import os
-import sys
-import numpy as np
-import pandas as pd
-import sympy
-from sympy import sympify
+import pickle as pkl
 import re
-import tempfile
 import shutil
+import sys
+import tempfile
-from pathlib import Path
-import pickle as pkl
-from datetime import datetime
 import warnings
+from datetime import datetime
+from io import StringIO
 from multiprocessing import cpu_count
-from sklearn.base import BaseEstimator, MultiOutputMixin, RegressorMixin
+from pathlib import Path
+
+import numpy as np
+import pandas as pd
+import sympy
+from sklearn.base import BaseEstimator, MultiOutputMixin, RegressorMixin
 from sklearn.utils import check_array, check_consistent_length, check_random_state
-from sklearn.utils.validation import (
-    _check_feature_names_in,
-    check_is_fitted,
-)
+from sklearn.utils.validation import _check_feature_names_in, check_is_fitted
+from sympy import sympify

+from .deprecated import make_deprecated_kwargs_for_pysr_regressor
+from .export_latex import generate_multiple_tables, generate_single_table, to_latex
+from .export_numpy import CallableEquation
 from .julia_helpers import (
-    init_julia,
-    _process_julia_project,
-    is_julia_version_greater_eq,
     _escape_filename,
+    _load_backend,
     _load_cluster_manager,
+    _process_julia_project,
     _update_julia_project,
-    _load_backend,
+    init_julia,
+    is_julia_version_greater_eq,
 )
-from .export_numpy import CallableEquation
-from .export_latex import generate_single_table, generate_multiple_tables, to_latex
-from .deprecated import make_deprecated_kwargs_for_pysr_regressor
-

 Main = None  # TODO: Rename to more descriptive name like "julia_runtime"

@@ -2454,7 +2451,7 @@ def idx_model_selection(equations: pd.DataFrame, model_selection: str) -> int:
 def _denoise(X, y, Xresampled=None, random_state=None):
     """Denoise the dataset using a Gaussian process."""
     from sklearn.gaussian_process import GaussianProcessRegressor
-    from sklearn.gaussian_process.kernels import RBF, WhiteKernel, ConstantKernel
+    from sklearn.gaussian_process.kernels import RBF, ConstantKernel, WhiteKernel

     gp_kernel = RBF(np.ones(X.shape[1])) + WhiteKernel(1e-1) + ConstantKernel()
     gpr = GaussianProcessRegressor(
pysr/test/__init__.py
CHANGED
@@ -1,5 +1,7 @@
 from .test import runtests
+from .test_cli import runtests as runtests_cli
 from .test_env import runtests as runtests_env
 from .test_jax import runtests as runtests_jax
 from .test_torch import runtests as runtests_torch
-
+
+__all__ = ["runtests", "runtests_env", "runtests_jax", "runtests_torch", "runtests_cli"]
pysr/test/test.py
CHANGED
@@ -1,28 +1,28 @@
+import inspect
 import os
+import pickle as pkl
+import tempfile
 import traceback
-import inspect
 import unittest
+import warnings
+from pathlib import Path
+
 import numpy as np
+import pandas as pd
+import sympy
 from sklearn import model_selection
 from sklearn.utils.estimator_checks import check_estimator
-import sympy
-import pandas as pd
-import warnings
-import pickle as pkl
-import tempfile
-from pathlib import Path

-from .. import julia_helpers
-from .. import PySRRegressor
+from .. import PySRRegressor, julia_helpers
+from ..export_latex import to_latex
 from ..sr import (
-    run_feature_selection,
-    _handle_feature_selection,
-    _csv_filename_to_pkl_filename,
-    idx_model_selection,
     _check_assertions,
+    _csv_filename_to_pkl_filename,
+    _handle_feature_selection,
     _process_constraints,
+    idx_model_selection,
+    run_feature_selection,
 )
-from ..export_latex import to_latex

 DEFAULT_PARAMS = inspect.signature(PySRRegressor.__init__).parameters
 DEFAULT_NITERATIONS = DEFAULT_PARAMS["niterations"].default
pysr/test/test_cli.py
CHANGED
@@ -1,5 +1,7 @@
 import unittest
+
 from click import testing as click_testing
+
 from .._cli.main import pysr


pysr/test/test_env.py
CHANGED
@@ -1,7 +1,7 @@
 """Contains tests for creating and initializing custom Julia projects."""

-import unittest
 import os
+import unittest
 from tempfile import TemporaryDirectory

 from .. import julia_helpers