Merge pull request #250 from Julia-Tempering/miguelbiron-patch-1
drop a couple of dependencies we are not using
miguelbiron authored Jul 5, 2024
2 parents 822dc82 + 22ba62f commit 4ba4263
Showing 11 changed files with 41 additions and 58 deletions.
17 changes: 5 additions & 12 deletions Project.toml
@@ -4,14 +4,11 @@ authors = ["Alexandre Bouchard-Côté <[email protected]>, Nikola Surjanovic
version = "0.4.2"

[deps]
ConcreteStructs = "2569d6c7-a4a2-43d3-a901-331e8e4be471"
DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
DataStructures = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
Dates = "ade2ca70-3891-5945-98fb-dc099432e06a"
Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
DocStringExtensions = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae"
Expect = "6a31a4e8-6e70-5a2d-b005-bc2d500d80a5"
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
Graphs = "86223c79-3864-5bf0-83f7-82e725a168b6"
Interpolations = "a98d9a8b-a2ab-59e6-89dd-64a1c18fca59"
JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
@@ -22,10 +19,9 @@ LogExpFunctions = "2ab3a3ac-af41-5b50-aa03-7779005ae688"
MPI = "da04e1cc-30fd-572f-bb4f-1f8673147195"
MPIPreferences = "3da0fdf6-3ccc-4f1b-acd9-58baa6c99267"
MacroTools = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09"
OnlineStats = "a15396b6-48d5-5d58-9928-6d29437db91e"
OnlineStatsBase = "925886fa-5bf2-5e8e-b522-a9147a512338"
OrderedCollections = "bac558e1-5e72-5ebc-8fee-abe8a469f55d"
Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
Preferences = "21216c6a-2e73-6563-6e65-726566657250"
Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
RecipesBase = "3cdcf5f2-1ef4-517c-9805-6587b60abb01"
@@ -34,7 +30,7 @@ Roots = "f2b01f46-fcfa-551c-844a-d8ac1e96c665"
Serialization = "9e88b42a-f829-5b0c-bbe9-9e923198166b"
SpecialFunctions = "276daf66-3868-5448-9aa4-cd146d93841b"
SplittableRandoms = "8efc31e9-3fb0-4277-b18c-5a3d5d07abad"
StaticArrays = "90137ffa-7385-5640-81b9-e52037218182"
StaticArraysCore = "1e83bf80-4336-4d27-bf5d-d5a4f845583c"
Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
StatsBase = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91"
ZipFile = "a5390f91-8eb1-5f08-bee0-b1d1ffed6cea"
@@ -53,14 +49,11 @@ PigeonsMCMCChainsExt = "MCMCChains"

[compat]
BridgeStan = "2"
ConcreteStructs = "0.2"
DataFrames = "1"
DataStructures = "0.18"
Distributions = "0.25"
DocStringExtensions = "0.9"
DynamicPPL = "0.23, 0.24, 0.25, 0.26, 0.27, 0.28"
Expect = "0.3"
ForwardDiff = "0.10"
Graphs = "1"
HypothesisTests = "0.11"
Interpolations = "0.14, 0.15"
@@ -72,15 +65,14 @@ MCMCChains = "6"
MPI = "0.20"
MPIPreferences = "0.1"
MacroTools = "0.5"
OnlineStats = "1"
OnlineStatsBase = "1"
OrderedCollections = "1"
Preferences = "1"
RecipesBase = "1"
Requires = "1"
Roots = "2"
SpecialFunctions = "2"
SplittableRandoms = "0.1"
StaticArrays = "1"
StaticArraysCore = "1"
Statistics = "1"
StatsBase = "0.33, 0.34"
ZipFile = "0.10"
@@ -89,4 +81,5 @@ julia = "1.8"
[extras]
BridgeStan = "c88b6f0a-829e-4b0b-94b7-f06ab5908f5a"
DynamicPPL = "366bfd00-2699-11ea-058f-f148b4cae6d8"
HypothesisTests = "09f84164-cd44-5f33-b23f-e6b0d136a0d5"
MCMCChains = "c7f686f2-ff18-58e9-bc7b-31028e88f75d"
1 change: 1 addition & 0 deletions docs/Project.toml
@@ -5,6 +5,7 @@ DistributionsAD = "ced4e74d-a319-5a8a-b0ac-84af2272839c"
DocStringExtensions = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae"
Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
DynamicPPL = "366bfd00-2699-11ea-058f-f148b4cae6d8"
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
LogDensityProblems = "6fdf6af0-433a-55f7-b3ed-c6c6e0b8df7c"
MCMCChains = "c7f686f2-ff18-58e9-bc7b-31028e88f75d"
OnlineStats = "a15396b6-48d5-5d58-9928-6d29437db91e"
6 changes: 4 additions & 2 deletions docs/src/input-julia.md
@@ -95,12 +95,14 @@ nothing # hide

## Changing the explorer

Here is an example using [`AutoMALA`](@ref) instead of the default
[`SliceSampler`](@ref). We only need to add methods to make
Here is an example using [`AutoMALA`](@ref)—a gradient-based sampler—instead of the default
[`SliceSampler`](@ref). For simplicity, we'll use the ForwardDiff backend; many others are supported by the [LogDensityProblemsAD.jl](https://github.com/tpapp/LogDensityProblemsAD.jl) interface.
We only need to add methods to make
our custom type `MyLogPotential` conform to the
[LogDensityProblems interface](https://github.com/tpapp/LogDensityProblems.jl):

```@example julia
using ForwardDiff
using LogDensityProblems
LogDensityProblems.dimension(lp::MyLogPotential) = 2
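The `@example` block above is truncated by the diff view. A minimal self-contained sketch of a conforming implementation, using a hypothetical stand-in for the docs' `MyLogPotential` (which is defined earlier on that page), might look like this:

```julia
using ForwardDiff, LogDensityProblems, LogDensityProblemsAD

# Hypothetical stand-in for the docs' MyLogPotential: a callable struct
# returning the log density (here a standard bivariate normal, up to a constant).
struct MyLogPotential end
(lp::MyLogPotential)(x) = -0.5 * sum(abs2, x)

# The three methods required by the LogDensityProblems interface:
LogDensityProblems.dimension(lp::MyLogPotential) = 2
LogDensityProblems.logdensity(lp::MyLogPotential, x) = lp(x)
LogDensityProblems.capabilities(::Type{<:MyLogPotential}) =
    LogDensityProblems.LogDensityOrder{0}()

# Gradients for AutoMALA can then come from the ForwardDiff backend:
ad_lp = LogDensityProblemsAD.ADgradient(:ForwardDiff, MyLogPotential())
LogDensityProblems.logdensity_and_gradient(ad_lp, [0.5, 0.5])
```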
2 changes: 1 addition & 1 deletion ext/PigeonsBridgeStanExt/interface.jl
@@ -6,7 +6,7 @@ The `data` argument can be a path to a file with `.json` suffix or the json string.
See `BridgeStan` for details.
"""
function Pigeons.StanLogPotential(stan_file, data, extra_information = nothing)
model = BridgeStan.StanModel(; stan_file, data, make_args = stan_threads_options())
model = BridgeStan.StanModel(stan_file, data; make_args = stan_threads_options())
result = StanLogPotential(
model,
stan_file,
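The one-line change above passes `stan_file` and `data` positionally rather than as keywords, matching the `BridgeStan.StanModel` constructor. A hedged usage sketch follows; the file name and JSON payload are made up for illustration:

```julia
using BridgeStan, Pigeons  # loading BridgeStan activates the extension

# Hypothetical model path and inline JSON data; per the docstring, `data`
# may be either a path to a .json file or a JSON string:
stan_file = "bernoulli.stan"
data = """{"N": 5, "y": [0, 1, 0, 0, 1]}"""

lp = Pigeons.StanLogPotential(stan_file, data)
```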
57 changes: 22 additions & 35 deletions src/Pigeons.jl
@@ -10,52 +10,39 @@ import MPI: Comm, Allreduce, Comm_rank,
Allgather, Comm_split, isend, recv,
bcast, tag_ub


using Base: Forward
using DataFrames
using Distributions
using StatsBase
using Interpolations
using Roots
using Dates
using OnlineStats
using MacroTools
using Distributions
using DocStringExtensions
using LinearAlgebra
using SpecialFunctions
using Serialization
using ConcreteStructs
using Random
using Graphs
using DataStructures
using Preferences
using MPIPreferences
using Expect
using Graphs
using Interpolations
using JSON
using LinearAlgebra
using LogDensityProblems
using LogDensityProblemsAD
using LogExpFunctions
using StaticArrays
using MPIPreferences
using MacroTools
using OnlineStatsBase
using OrderedCollections
using Printf
using Statistics
using Random
using RecipesBase
using Roots
using Serialization
using SpecialFunctions: beta
using StaticArraysCore
using Statistics
using StatsBase
using ZipFile
using ForwardDiff
using LogDensityProblems
using LogDensityProblemsAD
using JSON

import Serialization.serialize
import Serialization.deserialize
import Base.@kwdef
import Base.show
import Base.print
import Base: Forward, @kwdef, show, print, merge, keys
import Base.Threads.@threads
import OnlineStats._fit!
import OnlineStats.value
import OnlineStats._merge!
import OnlineStatsBase: _fit!, value, _merge!
import Random.rand!
import Base.keys
import Statistics.mean
import Statistics.var
import Base.merge
import Serialization: serialize, deserialize
import Statistics: mean, var


const use_auto_exec_folder = "use_auto_exec_folder"
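Note the import switch from `OnlineStats` to `OnlineStatsBase`: the latter is the lightweight interface package that `OnlineStats` builds on, and extending `_fit!`, `value`, and `_merge!` there is all a custom statistic needs. A toy sketch of that interface (my example, not from the codebase):

```julia
using OnlineStatsBase

# A toy running-maximum statistic built on the OnlineStatsBase interface.
mutable struct RunningMax <: OnlineStat{Number}
    value::Float64
    n::Int        # OnlineStatsBase reads this field for nobs
    RunningMax() = new(-Inf, 0)
end

OnlineStatsBase.value(o::RunningMax) = o.value

function OnlineStatsBase._fit!(o::RunningMax, y)
    o.value = max(o.value, y)
    o.n += 1
end

function OnlineStatsBase._merge!(a::RunningMax, b::RunningMax)
    a.value = max(a.value, b.value)
    a.n += b.n
end

value(fit!(RunningMax(), [1.0, 5.0, 3.0]))  # == 5.0
```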
2 changes: 1 addition & 1 deletion src/paths/ScaledPrecisionNormalPath.jl
@@ -54,7 +54,7 @@ Known cumulative barrier used for testing,
from [Predescu et al., 2003](https://aip.scitation.org/doi/10.1063/1.1644093).
"""
function analytic_cumulativebarrier(path::ScaledPrecisionNormalPath)
b = beta(path.dim / 2.0, path.dim / 2.0)
b = beta(path.dim / 2.0, path.dim / 2.0) # NB: this is the beta function in SpecialFunctions.jl
function cumulativebarrier(beta)
sigma0 = 1.0 / sqrt(path.precision0)
sigmab = 1.0 / sqrt(precision(path, beta))
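The added comment is useful because the enclosing function reuses the name `beta` for the argument of the inner `cumulativebarrier`, shadowing the imported function in that scope. A quick sanity check of the imported symbol (standard identities, nothing Pigeons-specific):

```julia
using SpecialFunctions: beta, gamma

# Euler beta function: B(x, y) = Γ(x)Γ(y) / Γ(x + y)
beta(1.0, 1.0) == 1.0                                   # B(1, 1) = 1
beta(2.0, 3.0) ≈ gamma(2.0) * gamma(3.0) / gamma(5.0)   # 1/12
```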
6 changes: 3 additions & 3 deletions src/recorders/LogSum.jl
@@ -5,14 +5,14 @@ end

LogSum(T::Type{<:Number} = Float64) = LogSum(-inf(T), 0)

OnlineStats.value(stat::LogSum, args...; kw...) = stat.value
OnlineStatsBase.value(stat::LogSum, args...; kw...) = stat.value

function OnlineStats._fit!(stat::LogSum, y)
function OnlineStatsBase._fit!(stat::LogSum, y)
stat.value = LogExpFunctions.logaddexp(stat.value, y)
stat.n += 1
end

function _merge!(stat1::LogSum, stat2::LogSum)
function OnlineStatsBase._merge!(stat1::LogSum, stat2::LogSum)
stat1.value = LogExpFunctions.logaddexp(stat1.value, stat2.value)
stat1.n += stat2.n
end
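For intuition, `LogSum` keeps a running sum in log space via `logaddexp`, which avoids overflow and underflow when summing exponentials. A standalone sketch of the same idea (not Pigeons code):

```julia
using LogExpFunctions: logaddexp

# Accumulate log(sum(exp.(ys))) one term at a time, never leaving log space.
acc = -Inf                       # log(0): the empty sum
for y in log.([1.0, 2.0, 3.0])
    acc = logaddexp(acc, y)      # log(exp(acc) + exp(y)), computed stably
end
acc ≈ log(6.0)                   # true
```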
2 changes: 1 addition & 1 deletion src/recorders/recorder.jl
@@ -51,7 +51,7 @@ recorder_values(pt::PT, recorder_name::Symbol) =
recorder_values(pt.reduced_recorders, recorder_name::Symbol)
recorder_values(reduced_recorders, recorder_name::Symbol) =
recorder_values(reduced_recorders[recorder_name])
recorder_values(recorder::GroupBy) = (value(v) for v in values(value(recorder)))
recorder_values(recorder::OnlineStatsBase.GroupBy) = (value(v) for v in values(value(recorder)))

"""
Average MH swap acceptance probabilities for each pair
2 changes: 1 addition & 1 deletion src/utils/@auto.jl
@@ -32,7 +32,7 @@ macro auto(expr)
end


# Parse whole struct definition for the @concrete macro
# Parse whole struct definition for the @auto macro
function _concretize(expr)
expr isa Expr && expr.head == :struct || error("Invalid usage of @auto")

2 changes: 1 addition & 1 deletion test/test_DistributionLogPotential.jl
@@ -67,7 +67,7 @@ n_local_mpi_processes = n_mpis,
n_local_mpi_processes = n_mpis,
n_threads = 1,
mpiexec_args = extra_mpi_args(),
dependencies = [Bijectors,BridgeStan]
dependencies = [Bijectors,BridgeStan,ForwardDiff]
)
)
@test true
2 changes: 1 addition & 1 deletion test/test_src_sortable.jl
@@ -1,5 +1,5 @@
@testset "src-sortable" begin
cd(dirname(dirname(pathof(Pigeons)))) do
@assert length(Pigeons.sort_includes("Pigeons.jl")) > 1
@test length(Pigeons.sort_includes("Pigeons.jl")) > 1
end
end
