diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index e238de2c8..8be9ae637 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -83,7 +83,6 @@ jobs:
         shell: bash -l {0}
     strategy:
       matrix:
-        julia-version: ['1.7.1']
         python-version: ['3.9']
         os: ['ubuntu-latest']
 
@@ -108,7 +107,7 @@ jobs:
       - name: "Cache Julia"
         uses: julia-actions/cache@v1
         with:
-          cache-name: ${{ matrix.os }}-conda-${{ matrix.julia-version }}-${{ matrix.python-version }}
+          cache-name: ${{ matrix.os }}-conda-${{ matrix.python-version }}
           cache-packages: false
       - name: "Install PySR"
         run: |
diff --git a/.github/workflows/CI_Windows.yml b/.github/workflows/CI_Windows.yml
index baa9a1b8e..b6751e51d 100644
--- a/.github/workflows/CI_Windows.yml
+++ b/.github/workflows/CI_Windows.yml
@@ -29,7 +29,7 @@ jobs:
         shell: bash
     strategy:
       matrix:
-        julia-version: ['1.6', '1.8.2']
+        julia-version: ['1.8.2']
         python-version: ['3.9']
         os: [windows-latest]
 
diff --git a/.github/workflows/pypi_deploy.yml b/.github/workflows/pypi_deploy.yml
index 2cb4c47d3..9124e35a3 100644
--- a/.github/workflows/pypi_deploy.yml
+++ b/.github/workflows/pypi_deploy.yml
@@ -9,12 +9,6 @@ jobs:
   pypi:
     runs-on: ubuntu-latest
     steps:
-      - name: Wait for tests to pass
-        uses: lewagon/wait-on-check-action@v1.2.0
-        with:
-          ref: ${{ github.ref }}
-          check-name: 'Linux'
-          repo-token: ${{ secrets.GITHUB_TOKEN }}
       - name: "Checkout"
         uses: actions/checkout@v3
       - name: "Set up Python"
diff --git a/examples/pysr_demo.ipynb b/examples/pysr_demo.ipynb
index 54a18ffdc..372ffcffd 100644
--- a/examples/pysr_demo.ipynb
+++ b/examples/pysr_demo.ipynb
@@ -109,11 +109,11 @@
    "source": [
     "from julia import Julia\n",
     "\n",
-    "julia = Julia(compiled_modules=False, threads='auto', optimize=3)\n",
+    "julia = Julia(compiled_modules=False, threads='auto')\n",
     "from julia import Main\n",
     "from julia.tools import redirect_output_streams\n",
     "\n",
-    "redirect_output_streams()\n"
+    "redirect_output_streams()"
    ]
   },
   {
@@ -137,7 +137,8 @@
    "source": [
     "import pysr\n",
     "\n",
-    "pysr.install()\n"
+    "# We don't precompile in Colab because compiled modules are incompatible with static Python libraries:\n",
+    "pysr.install(precompile=False)"
    ]
   },
   {
@@ -157,7 +158,7 @@
     "from torch.nn import functional as F\n",
     "from torch.utils.data import DataLoader, TensorDataset\n",
     "import pytorch_lightning as pl\n",
-    "from sklearn.model_selection import train_test_split\n"
+    "from sklearn.model_selection import train_test_split"
    ]
   },
   {
@@ -191,7 +192,7 @@
     "# Dataset\n",
     "np.random.seed(0)\n",
     "X = 2 * np.random.randn(100, 5)\n",
-    "y = 2.5382 * np.cos(X[:, 3]) + X[:, 0] ** 2 - 2\n"
+    "y = 2.5382 * np.cos(X[:, 3]) + X[:, 0] ** 2 - 2"
    ]
   },
   {
@@ -215,7 +216,7 @@
     "    populations=30,\n",
     "    procs=4,\n",
     "    model_selection=\"best\",\n",
-    ")\n"
+    ")"
    ]
   },
   {
@@ -246,7 +247,7 @@
     "    **default_pysr_params\n",
     ")\n",
     "\n",
-    "model.fit(X, y)\n"
+    "model.fit(X, y)"
    ]
   },
   {
@@ -266,7 +267,7 @@
    },
    "outputs": [],
    "source": [
-    "model\n"
+    "model"
    ]
   },
   {
@@ -286,7 +287,7 @@
    },
    "outputs": [],
    "source": [
-    "model.sympy()\n"
+    "model.sympy()"
    ]
   },
   {
@@ -306,7 +307,7 @@
    },
    "outputs": [],
    "source": [
-    "model.sympy(2)\n"
+    "model.sympy(2)"
    ]
   },
   {
@@ -335,7 +336,7 @@
    },
    "outputs": [],
    "source": [
-    "model.latex()\n"
+    "model.latex()"
    ]
   },
   {
@@ -361,7 +362,7 @@
     "ypredict_simpler = model.predict(X, 2)\n",
     "\n",
     "print(\"Default selection MSE:\", np.power(ypredict - y, 2).mean())\n",
-    "print(\"Manual selection MSE for index 2:\", np.power(ypredict_simpler - y, 2).mean())\n"
+    "print(\"Manual selection MSE for index 2:\", np.power(ypredict_simpler - y, 2).mean())"
    ]
   },
   {
@@ -395,7 +396,7 @@
    },
    "outputs": [],
    "source": [
-    "y = X[:, 0] ** 4 - 2\n"
+    "y = X[:, 0] ** 4 - 2"
    ]
   },
   {
@@ -425,7 +426,7 @@
     "    unary_operators=[\"cos\", \"exp\", \"sin\", \"quart(x) = x^4\"],\n",
     "    extra_sympy_mappings={\"quart\": lambda x: x**4},\n",
     ")\n",
-    "model.fit(X, y)\n"
+    "model.fit(X, y)"
    ]
   },
   {
@@ -436,7 +437,7 @@
    },
    "outputs": [],
    "source": [
-    "model.sympy()\n"
+    "model.sympy()"
    ]
   },
   {
@@ -538,7 +539,7 @@
     "X = 2 * np.random.rand(N, 5)\n",
     "sigma = np.random.rand(N) * (5 - 0.1) + 0.1\n",
     "eps = sigma * np.random.randn(N)\n",
-    "y = 5 * np.cos(3.5 * X[:, 0]) - 1.3 + eps\n"
+    "y = 5 * np.cos(3.5 * X[:, 0]) - 1.3 + eps"
    ]
   },
   {
@@ -560,7 +561,7 @@
    "source": [
     "plt.scatter(X[:, 0], y, alpha=0.2)\n",
     "plt.xlabel(\"$x_0$\")\n",
-    "plt.ylabel(\"$y$\")\n"
+    "plt.ylabel(\"$y$\")"
    ]
   },
   {
@@ -580,7 +581,7 @@
    },
    "outputs": [],
    "source": [
-    "weights = 1 / sigma ** 2\n"
+    "weights = 1 / sigma ** 2"
    ]
   },
   {
@@ -591,7 +592,7 @@
    },
    "outputs": [],
    "source": [
-    "weights[:5]\n"
+    "weights[:5]"
    ]
   },
   {
@@ -619,7 +620,7 @@
     "    binary_operators=[\"plus\", \"mult\"],\n",
     "    unary_operators=[\"cos\"],\n",
     ")\n",
-    "model.fit(X, y, weights=weights)\n"
+    "model.fit(X, y, weights=weights)"
    ]
   },
   {
@@ -639,7 +640,7 @@
    },
    "outputs": [],
    "source": [
-    "model\n"
+    "model"
    ]
   },
   {
@@ -662,7 +663,7 @@
     "best_idx = model.equations_.query(\n",
     "    f\"loss < {2 * model.equations_.loss.min()}\"\n",
     ").score.idxmax()\n",
-    "model.sympy(best_idx)\n"
+    "model.sympy(best_idx)"
    ]
   },
   {
@@ -693,7 +694,7 @@
    "source": [
     "plt.scatter(X[:, 0], y, alpha=0.1)\n",
     "y_prediction = model.predict(X, index=best_idx)\n",
-    "plt.scatter(X[:, 0], y_prediction)\n"
+    "plt.scatter(X[:, 0], y_prediction)"
    ]
   },
   {
@@ -719,7 +720,7 @@
    "outputs": [],
    "source": [
     "X = 2 * np.random.randn(100, 5)\n",
-    "y = 1 / X[:, [0, 1, 2]]\n"
+    "y = 1 / X[:, [0, 1, 2]]"
    ]
   },
   {
@@ -1024,7 +1025,7 @@
     "y_i = X[..., 0] ** 2 + 6 * np.cos(2 * X[..., 2])\n",
     "y = np.sum(y_i, axis=1) / y_i.shape[1]\n",
     "z = y**2\n",
-    "X.shape, y.shape\n"
+    "X.shape, y.shape"
    ]
   },
   {
@@ -1117,7 +1118,7 @@
     "            ),\n",
     "            \"interval\": \"step\",\n",
     "        }\n",
-    "        return [optimizer], [scheduler]\n"
+    "        return [optimizer], [scheduler]"
    ]
   },
   {
@@ -1152,7 +1153,7 @@
     "train_set = TensorDataset(X_train, z_train)\n",
     "train = DataLoader(train_set, batch_size=128, num_workers=2)\n",
     "test_set = TensorDataset(X_test, z_test)\n",
-    "test = DataLoader(test_set, batch_size=256, num_workers=2)\n"
+    "test = DataLoader(test_set, batch_size=256, num_workers=2)"
    ]
   },
   {
@@ -1184,7 +1185,7 @@
     "pl.seed_everything(0)\n",
     "model = SumNet()\n",
     "model.total_steps = total_steps\n",
-    "model.max_lr = 1e-2\n"
+    "model.max_lr = 1e-2"
    ]
   },
   {
@@ -1204,7 +1205,7 @@
    },
    "outputs": [],
    "source": [
-    "trainer = pl.Trainer(max_steps=total_steps, gpus=1, benchmark=True)\n"
+    "trainer = pl.Trainer(max_steps=total_steps, gpus=1, benchmark=True)"
    ]
   },
   {
@@ -1224,7 +1225,7 @@
    },
    "outputs": [],
    "source": [
-    "trainer.fit(model, train_dataloaders=train, val_dataloaders=test)\n"
+    "trainer.fit(model, train_dataloaders=train, val_dataloaders=test)"
    ]
   },
   {
@@ -1254,7 +1255,7 @@
     "y_for_pysr = torch.sum(y_i_for_pysr, dim=1) / y_i_for_pysr.shape[1]\n",
     "z_for_pysr = zt[idx] # Use true values.\n",
     "\n",
-    "X_for_pysr.shape, y_i_for_pysr.shape\n"
+    "X_for_pysr.shape, y_i_for_pysr.shape"
    ]
   },
   {
@@ -1287,7 +1288,7 @@
     "    binary_operators=[\"plus\", \"sub\", \"mult\"],\n",
     "    unary_operators=[\"cos\", \"square\", \"neg\"],\n",
     ")\n",
-    "model.fit(X=tmpX[idx2], y=tmpy[idx2])\n"
+    "model.fit(X=tmpX[idx2], y=tmpy[idx2])"
    ]
   },
   {
@@ -1319,7 +1320,7 @@
    },
    "outputs": [],
    "source": [
-    "model\n"
+    "model"
    ]
   },
   {
@@ -1375,9 +1376,7 @@
  },
  "gpuClass": "standard",
  "kernelspec": {
-  "display_name": "Python (main_ipynb)",
-  "language": "python",
-  "name": "main_ipynb"
+  "language": "python"
  },
  "language_info": {
   "name": "python",
diff --git a/pysr/julia_helpers.py b/pysr/julia_helpers.py
index 2eafa67c6..b0ec30d24 100644
--- a/pysr/julia_helpers.py
+++ b/pysr/julia_helpers.py
@@ -65,7 +65,7 @@ def _get_io_arg(quiet):
     return io_arg
 
 
-def install(julia_project=None, quiet=False):  # pragma: no cover
+def install(julia_project=None, quiet=False, precompile=None):  # pragma: no cover
     """
     Install PyCall.jl and all required dependencies for SymbolicRegression.jl.
 
@@ -78,17 +78,29 @@ def install(julia_project=None, quiet=False):  # pragma: no cover
     processed_julia_project, is_shared = _process_julia_project(julia_project)
     _set_julia_project_env(processed_julia_project, is_shared)
 
+    if precompile == False:
+        os.environ["JULIA_PKG_PRECOMPILE_AUTO"] = "0"
+
     julia.install(quiet=quiet)
-    Main = init_julia(julia_project, quiet=quiet)
+    Main, init_log = init_julia(julia_project, quiet=quiet, return_aux=True)
     io_arg = _get_io_arg(quiet)
 
+    if precompile is None:
+        precompile = init_log["compiled_modules"]
+
+    if not precompile:
+        Main.eval('ENV["JULIA_PKG_PRECOMPILE_AUTO"] = 0')
+
     if is_shared:
         # Install SymbolicRegression.jl:
         _add_sr_to_julia_project(Main, io_arg)
 
     Main.eval("using Pkg")
     Main.eval(f"Pkg.instantiate({io_arg})")
-    Main.eval(f"Pkg.precompile({io_arg})")
+
+    if precompile:
+        Main.eval(f"Pkg.precompile({io_arg})")
+
     if not quiet:
         warnings.warn(
             "It is recommended to restart Python after installing PySR's dependencies,"
@@ -145,7 +157,7 @@ def _check_for_conflicting_libraries():  # pragma: no cover
         )
 
 
-def init_julia(julia_project=None, quiet=False, julia_kwargs=None):
+def init_julia(julia_project=None, quiet=False, julia_kwargs=None, return_aux=False):
     """Initialize julia binary, turning off compiled modules if needed."""
     global julia_initialized
     global julia_kwargs_at_initialization
@@ -183,6 +195,10 @@ def init_julia(julia_project=None, quiet=False, julia_kwargs=None):
             julia_kwargs = {**julia_kwargs, "compiled_modules": False}
             Julia(**julia_kwargs)
 
+    using_compiled_modules = (not "compiled_modules" in julia_kwargs) or julia_kwargs[
+        "compiled_modules"
+    ]
+
     from julia import Main as _Main
 
     Main = _Main
@@ -222,6 +238,8 @@ def init_julia(julia_project=None, quiet=False, julia_kwargs=None):
         julia_kwargs_at_initialization = julia_kwargs
 
     julia_initialized = True
+    if return_aux:
+        return Main, {"compiled_modules": using_compiled_modules}
     return Main
 
 
diff --git a/pysr/version.py b/pysr/version.py
index 4793f7df2..32d1262c5 100644
--- a/pysr/version.py
+++ b/pysr/version.py
@@ -1,2 +1,2 @@
-__version__ = "0.11.12"
+__version__ = "0.11.13"
 __symbolic_regression_jl_version__ = "0.15.0"