From 0befe99a3460dda9700652b598cf7acc0aff353f Mon Sep 17 00:00:00 2001 From: Vaibhav Dixit Date: Thu, 19 Sep 2024 15:52:55 -0400 Subject: [PATCH] rem multistartopt from docs project fro now --- .github/workflows/Documentation.yml | 4 ++-- docs/Project.toml | 2 -- docs/src/optimization_packages/multistartoptimization.md | 4 ++-- docs/src/optimization_packages/optimization.md | 2 +- docs/src/tutorials/minibatch.md | 8 ++++---- 5 files changed, 9 insertions(+), 11 deletions(-) diff --git a/.github/workflows/Documentation.yml b/.github/workflows/Documentation.yml index 09bb1b3b7..46f1e4ea8 100644 --- a/.github/workflows/Documentation.yml +++ b/.github/workflows/Documentation.yml @@ -16,7 +16,7 @@ jobs: with: version: '1' - name: Install dependencies - run: julia --project=docs/ -e 'using Pkg; Pkg.develop(vcat(PackageSpec(path = pwd()), [PackageSpec(path = joinpath("lib", dir)) for dir in readdir("lib") if dir !== "OptimizationQuadDIRECT"])); Pkg.instantiate()' + run: julia --project=docs/ -e 'using Pkg; Pkg.develop(vcat(PackageSpec(path = pwd()), [PackageSpec(path = joinpath("lib", dir)) for dir in readdir("lib") if (dir !== "OptimizationQuadDIRECT" && dir !== "OptimizationMultistartOptimization")])); Pkg.instantiate()' - name: Build and deploy env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # For authentication with GitHub Actions token run: julia --project=docs/ --code-coverage=user docs/make.jl - uses: julia-actions/julia-processcoverage@v1 with: - directories: src,lib/OptimizationBBO/src,lib/OptimizationCMAEvolutionStrategy/src,lib/OptimizationEvolutionary/src,lib/OptimizationFlux/src,lib/OptimizationGCMAES/src,lib/OptimizationMOI/src,lib/OptimizationMetaheuristics/src,lib/OptimizationMultistartOptimization/src,lib/OptimizationNLopt/src,lib/OptimizationNOMAD/src,lib/OptimizationOptimJL/src,lib/OptimizationOptimisers/src,lib/OptimizationPolyalgorithms/src,lib/OptimizationQuadDIRECT/src,lib/OptimizationSpeedMapping/src + directories: 
src,lib/OptimizationBBO/src,lib/OptimizationCMAEvolutionStrategy/src,lib/OptimizationEvolutionary/src,lib/OptimizationGCMAES/src,lib/OptimizationMOI/src,lib/OptimizationMetaheuristics/src,lib/OptimizationMultistartOptimization/src,lib/OptimizationNLopt/src,lib/OptimizationNOMAD/src,lib/OptimizationOptimJL/src,lib/OptimizationOptimisers/src,lib/OptimizationPolyalgorithms/src,lib/OptimizationQuadDIRECT/src,lib/OptimizationSpeedMapping/src - uses: codecov/codecov-action@v4 with: file: lcov.info diff --git a/docs/Project.toml b/docs/Project.toml index 860a270d7..207aa29df 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -24,7 +24,6 @@ OptimizationGCMAES = "6f0a0517-dbc2-4a7a-8a20-99ae7f27e911" OptimizationMOI = "fd9f6733-72f4-499f-8506-86b2bdd0dea1" OptimizationManopt = "e57b7fff-7ee7-4550-b4f0-90e9476e9fb6" OptimizationMetaheuristics = "3aafef2f-86ae-4776-b337-85a36adf0b55" -OptimizationMultistartOptimization = "e4316d97-8bbb-4fd3-a7d8-3851d2a72823" OptimizationNLPModels = "064b21be-54cf-11ef-1646-cdfee32b588f" OptimizationNLopt = "4e6fcdb7-1186-4e1f-a706-475e75c168bb" OptimizationNOMAD = "2cab0595-8222-4775-b714-9828e6a9e01b" @@ -67,7 +66,6 @@ OptimizationGCMAES = "0.3" OptimizationMOI = "0.5" OptimizationManopt = "0.0.4" OptimizationMetaheuristics = "0.3" -OptimizationMultistartOptimization = "0.3" OptimizationNLPModels = "0.0.2" OptimizationNLopt = "0.3" OptimizationNOMAD = "0.3" diff --git a/docs/src/optimization_packages/multistartoptimization.md b/docs/src/optimization_packages/multistartoptimization.md index aee313425..4f801e64f 100644 --- a/docs/src/optimization_packages/multistartoptimization.md +++ b/docs/src/optimization_packages/multistartoptimization.md @@ -31,7 +31,7 @@ constraint equations. 
However, lower and upper constraints set by `lb` and `ub` The Rosenbrock function can be optimized using `MultistartOptimization.TikTak()` with 100 initial points and the local method `NLopt.LD_LBFGS()` as follows: -```@example MultiStart +```julia using Optimization, OptimizationMultistartOptimization, OptimizationNLopt rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2 x0 = zeros(2) @@ -43,7 +43,7 @@ sol = solve(prob, MultistartOptimization.TikTak(100), NLopt.LD_LBFGS()) You can use any `Optimization` optimizers you like. The global method of the `MultistartOptimization` is a positional argument and followed by the local method. For example, we can perform a multistartoptimization with LBFGS as the optimizer using either the `NLopt.jl` or `Optim.jl` implementation as follows. Moreover, this interface allows you to access and adjust all the optimizer settings as you normally would: -```@example MultiStart +```julia using OptimizationOptimJL f = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff()) prob = Optimization.OptimizationProblem(f, x0, p, lb = [-1.0, -1.0], ub = [1.0, 1.0]) diff --git a/docs/src/optimization_packages/optimization.md b/docs/src/optimization_packages/optimization.md index 7cd469f7d..66d108653 100644 --- a/docs/src/optimization_packages/optimization.md +++ b/docs/src/optimization_packages/optimization.md @@ -89,4 +89,4 @@ optf = OptimizationFunction(loss, AutoZygote()) prob = OptimizationProblem(optf, ps_ca, data) res = Optimization.solve(prob, Optimization.Sophia(), callback = callback) -``` \ No newline at end of file +``` diff --git a/docs/src/tutorials/minibatch.md b/docs/src/tutorials/minibatch.md index 70b513814..3604d78f6 100644 --- a/docs/src/tutorials/minibatch.md +++ b/docs/src/tutorials/minibatch.md @@ -67,13 +67,13 @@ k = 10 train_loader = MLUtils.DataLoader((ode_data, t), batchsize = k) numEpochs = 300 -l1 = loss_adjoint(pp, train_loader.data[1], train_loader.data[2])[1] +l1 = loss_adjoint(pp, 
train_loader.data)[1] optfun = OptimizationFunction( loss_adjoint, Optimization.AutoZygote()) -optprob = OptimizationProblem(optfun, pp) +optprob = OptimizationProblem(optfun, ps_ca, train_loader) using IterTools: ncycle -res1 = Optimization.solve(optprob, Optimisers.ADAM(0.05), ncycle(train_loader, numEpochs), - callback = callback) +res1 = Optimization.solve( + optprob, Optimisers.ADAM(0.05); callback = callback, epochs = 1000) ```