Merge pull request #50 from JuliaML/auto-juliaformatter-pr

[AUTO] JuliaFormatter.jl run

juliohm committed May 15, 2023
2 parents b7c41d9 + 32223de commit 7d2aa83

Showing 19 changed files with 106 additions and 129 deletions.
25 changes: 14 additions & 11 deletions src/DensityRatioEstimation.jl
@@ -36,25 +36,25 @@ include("kmm/julia.jl")
using Requires
function __init__()
  # KMM
-  @require JuMP="4076af6c-e467-56ae-b986-b466b2749572" begin
-    @require Ipopt="b6b21f68-93f8-5de0-b562-5493be1d77c9" include("kmm/jump.jl")
+  @require JuMP = "4076af6c-e467-56ae-b986-b466b2749572" begin
+    @require Ipopt = "b6b21f68-93f8-5de0-b562-5493be1d77c9" include("kmm/jump.jl")
  end

  # KLIEP
-  @require Optim="429524aa-4258-5aef-a3af-852621145aeb" include("kliep/optim.jl")
-  @require Convex="f65535da-76fb-5f13-bab9-19810c17039a" begin
-    @require ECOS="e2685f51-7e38-5353-a97d-a921fd2c8199" include("kliep/convex.jl")
+  @require Optim = "429524aa-4258-5aef-a3af-852621145aeb" include("kliep/optim.jl")
+  @require Convex = "f65535da-76fb-5f13-bab9-19810c17039a" begin
+    @require ECOS = "e2685f51-7e38-5353-a97d-a921fd2c8199" include("kliep/convex.jl")
  end

  # LSIF
-  @require Optim="429524aa-4258-5aef-a3af-852621145aeb" include("lsif/optim.jl")
-  @require JuMP="4076af6c-e467-56ae-b986-b466b2749572" begin
-    @require Ipopt="b6b21f68-93f8-5de0-b562-5493be1d77c9" include("lsif/jump.jl")
+  @require Optim = "429524aa-4258-5aef-a3af-852621145aeb" include("lsif/optim.jl")
+  @require JuMP = "4076af6c-e467-56ae-b986-b466b2749572" begin
+    @require Ipopt = "b6b21f68-93f8-5de0-b562-5493be1d77c9" include("lsif/jump.jl")
  end

  # AD and GPU libs
-  @require Zygote="e88e6eb3-aa80-5325-afca-941959d7151f" include("utils/zygote.jl")
-  @require CuArrays="3a865a2d-5b23-5a0f-bc46-62713ec82fae" include("utils/cuarrays.jl")
+  @require Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" include("utils/zygote.jl")
+  @require CuArrays = "3a865a2d-5b23-5a0f-bc46-62713ec82fae" include("utils/cuarrays.jl")
end

export
@@ -67,7 +67,10 @@ export

  # estimators
  DensityRatioEstimator,
-  KMM, uKMM, KLIEP, LSIF,
+  KMM,
+  uKMM,
+  KLIEP,
+  LSIF,
  available_optlib,
  default_optlib,
  densratiofunc,
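
A note on the `@require` hooks above: Requires.jl defers loading each solver
backend until the corresponding package is imported in the user's session.
A minimal sketch of the user-facing effect (the toy data is illustrative,
not from the package):

    using DensityRatioEstimation  # core package; no solver backends yet
    using Optim                   # triggers include("kliep/optim.jl") via @require

    x_nu = randn(100)             # samples from the numerator density
    x_de = randn(200) .+ 1.0      # samples from the denominator density
    r̂ = densratio(x_nu, x_de, KLIEP(), optlib=OptimLib)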
26 changes: 8 additions & 18 deletions src/api/estimators.jl
@@ -26,8 +26,7 @@ the list below:
See also [`densratiofunc`](@ref).
"""
-densratio(x_nu, x_de, dre::DensityRatioEstimator;
-          optlib=default_optlib(typeof(dre))) =
+densratio(x_nu, x_de, dre::DensityRatioEstimator; optlib=default_optlib(typeof(dre))) =
  _densratio(x_nu, x_de, dre, optlib)

@@ -44,8 +43,7 @@ See also [`densratio`](@ref).
Only some estimators define a ratio function that can
be evaluated outside `x_de`.
"""
-densratiofunc(x_nu, x_de, dre::DensityRatioEstimator;
-              optlib=default_optlib(typeof(dre))) =
+densratiofunc(x_nu, x_de, dre::DensityRatioEstimator; optlib=default_optlib(typeof(dre))) =
  _densratiofunc(x_nu, x_de, dre, optlib)

"""
@@ -55,32 +53,24 @@ Return default optimization library for density ratio
estimator `dre`. The function can also be called on the
type `typeof(dre)`.
"""
-default_optlib(dre::DensityRatioEstimator) =
-  default_optlib(typeof(dre))
+default_optlib(dre::DensityRatioEstimator) = default_optlib(typeof(dre))

"""
    available_optlib(dre)

Return list of implementations available via different
optimization frameworks.
"""
-available_optlib(dre::DensityRatioEstimator) =
-  available_optlib(typeof(dre))
+available_optlib(dre::DensityRatioEstimator) = available_optlib(typeof(dre))

###################################################
## functions to be implemented by new estimators ##
###################################################

-_densratio(x_nu, x_de, dre::DensityRatioEstimator,
-           optlib::Type{OptimizationLibrary}) =
-  @error "not implemented"
+_densratio(x_nu, x_de, dre::DensityRatioEstimator, optlib::Type{OptimizationLibrary}) = @error "not implemented"

-_densratiofunc(x_nu, x_de, dre::DensityRatioEstimator,
-               optlib::Type{OptimizationLibrary}) =
-  @error "not implemented"
+_densratiofunc(x_nu, x_de, dre::DensityRatioEstimator, optlib::Type{OptimizationLibrary}) = @error "not implemented"

-default_optlib(dre::Type{DensityRatioEstimator}) =
-  @error "not implemented"
+default_optlib(dre::Type{DensityRatioEstimator}) = @error "not implemented"

-available_optlib(dre::Type{DensityRatioEstimator}) =
-  @error "not implemented"
+available_optlib(dre::Type{DensityRatioEstimator}) = @error "not implemented"
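
The file above follows a common Julia API split: the exported functions
dispatch to underscore-prefixed internals whose fallbacks raise
`@error "not implemented"`. A new estimator only needs to override those
internals; a hypothetical sketch (the `MyEstimator` name is made up, and
the methods would be defined against the package's internal functions):

    struct MyEstimator <: DensityRatioEstimator end

    default_optlib(::Type{MyEstimator}) = JuliaLib
    available_optlib(::Type{MyEstimator}) = [JuliaLib]

    # placeholder logic: a real estimator would solve for the ratios here
    _densratio(x_nu, x_de, dre::MyEstimator, optlib::Type{JuliaLib}) =
      fill(1.0, length(x_de))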
6 changes: 2 additions & 4 deletions src/api/fitters.jl
@@ -24,14 +24,12 @@ Hyperparameter tuning is not defined for all
density ratio estimators. Therefore, this
function may not work with some estimators.
"""
-fit(dre::Type{<:DensityRatioEstimator}, x_nu, x_de,
-    fitter::EstimatorFitter; optlib=default_optlib(dre)) =
+fit(dre::Type{<:DensityRatioEstimator}, x_nu, x_de, fitter::EstimatorFitter; optlib=default_optlib(dre)) =
  _fit(dre, x_nu, x_de, fitter, optlib)

################################################
## functions to be implemented by new fitters ##
################################################

-_fit(dre::Type{DensityRatioEstimator}, x_nu, x_de,
-     fitter::EstimatorFitter, optlib::Type{OptimizationLibrary}) =
+_fit(dre::Type{DensityRatioEstimator}, x_nu, x_de, fitter::EstimatorFitter, optlib::Type{OptimizationLibrary}) =
  @error "not implemented"
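
For context, `fit` is the hyperparameter-tuning entry point. Combined with
the `LCV` fitter from src/lcv.jl further down, a call reads roughly as
follows (the candidate ranges are illustrative):

    # try σ ∈ {0.5, 1.0, 2.0} and b ∈ {20}, pick the best by cross-validation
    dre = fit(KLIEP, x_nu, x_de, LCV((σ=[0.5, 1.0, 2.0], b=[20])))
    r̂ = densratio(x_nu, x_de, dre)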
6 changes: 3 additions & 3 deletions src/api/optlibs.jl
@@ -10,7 +10,7 @@ An optimization library (e.g. Optim.jl, Convex.jl, JuMP.jl).
abstract type OptimizationLibrary end

# optimization libraries for dispatch
-struct JuliaLib  <: OptimizationLibrary end
-struct OptimLib  <: OptimizationLibrary end
+struct JuliaLib <: OptimizationLibrary end
+struct OptimLib <: OptimizationLibrary end
struct ConvexLib <: OptimizationLibrary end
-struct JuMPLib   <: OptimizationLibrary end
+struct JuMPLib <: OptimizationLibrary end
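
These empty structs are never instantiated: the types themselves act as
dispatch tags that select a backend, as in (sketch):

    r̂₁ = densratio(x_nu, x_de, KLIEP(), optlib=OptimLib)   # Optim.jl backend
    r̂₂ = densratio(x_nu, x_de, KLIEP(), optlib=ConvexLib)  # Convex.jl backend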
18 changes: 7 additions & 11 deletions src/kliep.jl
@@ -22,24 +22,22 @@ Kullback-Leibler importance estimation procedure (KLIEP).
* Júlio Hoffimann ([email protected])
"""
@with_kw struct KLIEP{T,RNG} <: DensityRatioEstimator
-  σ::T=2.0
-  b::Int=10
-  rng::RNG=Random.GLOBAL_RNG
+  σ::T = 2.0
+  b::Int = 10
+  rng::RNG = Random.GLOBAL_RNG
end

default_optlib(dre::Type{<:KLIEP}) = OptimLib

available_optlib(dre::Type{<:KLIEP}) = [OptimLib, ConvexLib]

-function _densratio(x_nu, x_de, dre::KLIEP,
-                    optlib::Type{<:OptimizationLibrary})
+function _densratio(x_nu, x_de, dre::KLIEP, optlib::Type{<:OptimizationLibrary})
  K_nu, K_de, x_ba = _kliep_consts(x_nu, x_de, dre)
  α = _kliep_coeffs(K_nu, K_de, dre, optlib)
-  K_de*α
+  K_de * α
end

-function _densratiofunc(x_nu, x_de, dre::KLIEP,
-                        optlib::Type{<:OptimizationLibrary})
+function _densratiofunc(x_nu, x_de, dre::KLIEP, optlib::Type{<:OptimizationLibrary})
  K_nu, K_de, x_ba = _kliep_consts(x_nu, x_de, dre)
  α = _kliep_coeffs(K_nu, K_de, dre, optlib)
  function r(x)
@@ -62,6 +60,4 @@ end

Return the coefficients of KLIEP basis expansion.
"""
-_kliep_coeffs(K_nu, K_de, dre::KLIEP,
-              optlib::Type{<:OptimizationLibrary}) =
-  @error "not implemented"
+_kliep_coeffs(K_nu, K_de, dre::KLIEP, optlib::Type{<:OptimizationLibrary}) = @error "not implemented"
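
For background, KLIEP models the ratio as a kernel basis expansion
r(x) = Σⱼ αⱼ k(x, xⱼ) over b basis points (typically drawn from the
numerator samples; see the `b` and `rng` fields above), and `_kliep_coeffs`
solves for α. A self-contained sketch of evaluating such an expansion with
a Gaussian kernel (helper names are mine, not the package's internals):

    gaussian_kernel(x, x′, σ) = exp(-sum(abs2, x .- x′) / (2σ^2))

    # r(x) = Σⱼ αⱼ k(x, xⱼ) over basis points x_ba with coefficients α
    ratio_expansion(x, x_ba, α, σ) =
      sum(α[j] * gaussian_kernel(x, x_ba[j], σ) for j in eachindex(α))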
7 changes: 4 additions & 3 deletions src/kliep/convex.jl
@@ -17,9 +17,10 @@ function _kliep_coeffs(K_nu, K_de, dre::KLIEP, optlib::Type{ConvexLib})
  k = sum(K_de, dims=1)

  # objective function and constraints
-  α = Convex.Variable(b); w = K*α
-  objective   = sum(log(w[i]) for i in 1:n_nu)
-  constraints = [α ≥ 0, dot(α,k) == n_de]
+  α = Convex.Variable(b)
+  w = K * α
+  objective = sum(log(w[i]) for i in 1:n_nu)
+  constraints = [α ≥ 0, dot(α, k) == n_de]

  # optimization problem
  problem = Convex.maximize(objective, constraints)
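
Read as a standalone program, the formulation above is the concave
maximization of Σᵢ log((Kα)ᵢ) subject to α ≥ 0 and kᵀα = n_de. A hedged
sketch of solving it directly with Convex.jl and ECOS (assumes K, k, b,
n_nu and n_de were precomputed as in `_kliep_consts`):

    using Convex, ECOS, LinearAlgebra

    α = Convex.Variable(b)
    w = K * α
    problem = Convex.maximize(sum(log(w[i]) for i in 1:n_nu),
                              [α ≥ 0, dot(α, k) == n_de])
    Convex.solve!(problem, ECOS.Optimizer)
    α̂ = vec(Convex.evaluate(α))  # fitted coefficients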
22 changes: 11 additions & 11 deletions src/kliep/optim.jl
@@ -19,37 +19,37 @@ function _kliep_coeffs(K_nu, K_de, dre::KLIEP, optlib::Type{OptimLib})
  lc = uc = [n_de]

  # constants for inequality constraints
-  T  = eltype(K_de)
+  T = eltype(K_de)
  lx = fill(zero(T), b)
  ux = fill(Inf, b)

  # objective
-  f(α) = -sum(log, K*α)
+  f(α) = -sum(log, K * α)
  function ∇f!(g, α)
-    p = K*α
+    p = K * α
    for l in 1:b
-      g[l] = -sum(K[j,l] / p[j] for j in 1:n_nu)
+      g[l] = -sum(K[j, l] / p[j] for j in 1:n_nu)
    end
  end
  function ∇²f!(h, α)
-    p = K*α
+    p = K * α
    for k in 1:b, l in 1:b
-      h[k,l] = sum(view(K,:,k) .* view(K,:,l) ./ p)
+      h[k, l] = sum(view(K, :, k) .* view(K, :, l) ./ p)
    end
  end

  # equality constraint
-  c!(c, α)    = c .= A*α
-  J!(J, α)    = J .= A
+  c!(c, α) = c .= A * α
+  J!(J, α) = J .= A
  H!(H, α, λ) = H .+= 0

  # initial guess
-  αₒ = fill(n_de/sum(A), b)
+  αₒ = fill(n_de / sum(A), b)

  # optimization problem
-  objective   = TwiceDifferentiable(f, ∇f!, ∇²f!, αₒ)
+  objective = TwiceDifferentiable(f, ∇f!, ∇²f!, αₒ)
  constraints = TwiceDifferentiableConstraints(c!, J!, H!, lx, ux, lc, uc)
-  initguess   = αₒ
+  initguess = αₒ

  # solve problem with interior-point primal-dual Newton
  solution = optimize(objective, constraints, initguess, IPNewton())
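
The hand-coded derivatives above follow from the objective
f(α) = -Σⱼ log((Kα)ⱼ), whose gradient entries are
∂f/∂αₗ = -Σⱼ Kⱼₗ/(Kα)ⱼ, exactly what `∇f!` fills in. A standalone
finite-difference check of that formula (illustrative):

    using LinearAlgebra

    f(α, K) = -sum(log, K * α)

    function gradient_check(K; h=1e-6)
      n, b = size(K)
      α = rand(b) .+ 0.5                       # keep Kα strictly positive
      p = K * α
      g = [-sum(K[j, l] / p[j] for j in 1:n) for l in 1:b]  # analytic gradient
      e(l) = (1:b) .== l                       # l-th unit vector
      ĝ = [(f(α .+ h .* e(l), K) - f(α, K)) / h for l in 1:b]
      maximum(abs.(g .- ĝ))                    # ≈ 0 up to O(h)
    end

    gradient_check(rand(50, 10) .+ 0.1)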
15 changes: 7 additions & 8 deletions src/kmm.jl
@@ -20,8 +20,7 @@ function _kmm_consts(x_nu, x_de, dre::AbstractKMM)
  Kdede, typeof(σ)(n_de / n_nu) * sum(Kdenu, dims=2)
end

-function _densratio(x_nu, x_de, dre::AbstractKMM,
-                    optlib::Type{<:OptimizationLibrary})
+function _densratio(x_nu, x_de, dre::AbstractKMM, optlib::Type{<:OptimizationLibrary})
  K, κ = _kmm_consts(x_nu, x_de, dre)
  _kmm_ratios(K, κ, dre, optlib)
end
@@ -47,8 +46,8 @@ Unconstrained Kernel Mean Matching (KMM).
* Kai Xu ([email protected])
"""
@with_kw struct uKMM{T} <: AbstractKMM
-  σ::T=2.0
-  λ::T=0.001
+  σ::T = 2.0
+  λ::T = 0.001
end

default_optlib(dre::Type{<:uKMM}) = JuliaLib
@@ -78,10 +77,10 @@ Kernel Mean Matching (KMM).
* Kai Xu ([email protected])
"""
@with_kw struct KMM{T} <: AbstractKMM
-  σ::T=2.0
-  B::T=Inf
-  ϵ::T=0.01
-  λ::T=0.001
+  σ::T = 2.0
+  B::T = Inf
+  ϵ::T = 0.01
+  λ::T = 0.001
end

default_optlib(dre::Type{<:KMM}) = JuMPLib
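
For orientation, both KMM variants minimize the quadratic
(1/2)βᵀKβ - κᵀβ in the weights β. uKMM drops the box and mean constraints,
so with a ridge term λ the minimizer reduces to a linear solve; a sketch of
that unconstrained case (my reading of the JuliaLib backend, whose file
kmm/julia.jl is not part of this diff):

    using LinearAlgebra

    # stationarity of (1/2)βᵀKβ - κᵀβ + (λ/2)βᵀβ  ⇒  (K + λI)β = κ
    ukmm_ratios(K, κ, λ) = (K + λ * I) \ vec(κ)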
4 changes: 2 additions & 2 deletions src/kmm/jump.jl
@@ -12,7 +12,7 @@ function _kmm_jump_model(K, κ, dre::AbstractKMM, optlib::Type{JuMPLib})
  # optimization problem
  model = Model(optimizer_with_attributes(Ipopt.Optimizer, "print_level" => 0, "sb" => "yes"))
  @variable(model, β[1:m])
-  @objective(model, Min, (1/2) * dot(β, K*β - 2κ))
+  @objective(model, Min, (1 / 2) * dot(β, K * β - 2κ))

  return model, β
end
@@ -38,7 +38,7 @@ function _kmm_ratios(K, κ, dre::KMM, optlib::Type{JuMPLib})
  # adding constraints
  @constraint(model, 0 .≤ β)
  isinf(B) || @constraint(model, β .≤ B)
-  @constraint(model, (1-ϵ) ≤ mean(β) ≤ (1+ϵ))
+  @constraint(model, (1 - ϵ) ≤ mean(β) ≤ (1 + ϵ))

  # solve the problem
  optimize!(model)
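
The constraints above keep β a valid vector of importance weights:
0 ≤ β (and β ≤ B when B is finite), with mean(β) held within 1 ± ϵ so the
weights average to roughly one. End to end, the constrained estimator is
invoked as (parameter values illustrative):

    using JuMP, Ipopt   # loads the @require'd kmm/jump.jl backend

    r̂ = densratio(x_nu, x_de, KMM(B=1000.0, ϵ=0.01), optlib=JuMPLib)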
10 changes: 4 additions & 6 deletions src/lcv.jl
@@ -24,13 +24,11 @@ end

LCV(ranges::NamedTuple) = LCV(ranges, 10)

-function _fit(::Type{<:KLIEP}, x_nu, x_de,
-              fitter::EstimatorFitter,
-              optlib::Type{<:OptimizationLibrary})
+function _fit(::Type{<:KLIEP}, x_nu, x_de, fitter::EstimatorFitter, optlib::Type{<:OptimizationLibrary})
  # retrieve parameters
  ranges = fitter.ranges
  nfolds = fitter.nfolds
-  npts   = length(x_nu)
+  npts = length(x_nu)

  @assert nfolds ≤ npts "number of folds must be smaller than number of numerator samples"

@@ -48,8 +46,8 @@ function _fit(::Type{<:KLIEP}, x_nu, x_de,
  # estimate loss with cross-validation
  Ĵₖ = map(1:nfolds) do k
    # training and hold-out samples
-    train = [ind for i in vcat(1:k-1, k+1:nfolds) for ind in folds[i]]
-    hold  = folds[k]
+    train = [ind for i in vcat(1:(k - 1), (k + 1):nfolds) for ind in folds[i]]
+    hold = folds[k]

    # perform estimation with training samples
    r = densratiofunc(x_nu[train], x_de, dre, optlib=optlib)
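
In the split above, the comprehension concatenates every fold except the
k-th into the training index set while the k-th fold is held out. An
equivalent, more explicit form (illustrative):

    # indices of all folds except fold k
    train = reduce(vcat, [folds[i] for i in 1:nfolds if i != k])
    hold  = folds[k]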
21 changes: 9 additions & 12 deletions src/lsif.jl
@@ -23,25 +23,23 @@ Importance Estimation
* Júlio Hoffimann ([email protected])
"""
@with_kw struct LSIF{T,RNG} <: DensityRatioEstimator
-  σ::T=2.0
-  b::Int=10
-  λ::T=0.001
-  rng::RNG=Random.GLOBAL_RNG
+  σ::T = 2.0
+  b::Int = 10
+  λ::T = 0.001
+  rng::RNG = Random.GLOBAL_RNG
end

default_optlib(dre::Type{<:LSIF}) = OptimLib

available_optlib(dre::Type{<:LSIF}) = [OptimLib, JuMPLib]

-function _densratio(x_nu, x_de, dre::LSIF,
-                    optlib::Type{<:OptimizationLibrary})
+function _densratio(x_nu, x_de, dre::LSIF, optlib::Type{<:OptimizationLibrary})
  K_de, H, h, x_ba = _lsif_consts(x_nu, x_de, dre)
  α = _lsif_coeffs(H, h, dre, optlib)
-  K_de*α
+  K_de * α
end

-function _densratiofunc(x_nu, x_de, dre::LSIF,
-                        optlib::Type{<:OptimizationLibrary})
+function _densratiofunc(x_nu, x_de, dre::LSIF, optlib::Type{<:OptimizationLibrary})
  K_de, H, h, x_ba = _lsif_consts(x_nu, x_de, dre)
  α = _lsif_coeffs(H, h, dre, optlib)
  function r(x)
@@ -61,7 +59,7 @@ function _lsif_consts(x_nu, x_de, dre)
    φ′ = view(K_de, :, l′)
    for l in 1:l′
      φ = view(K_de, :, l)
-      Φ[l,l′] = mean(φ .* φ′)
+      Φ[l, l′] = mean(φ .* φ′)
    end
  end
  H = Symmetric(Φ)
@@ -75,5 +73,4 @@ end

Return the coefficients of LSIF basis expansion.
"""
-_lsif_coeffs(H, h, dre::LSIF, optlib::Type{<:OptimizationLibrary}) =
-  @error "not implemented"
+_lsif_coeffs(H, h, dre::LSIF, optlib::Type{<:OptimizationLibrary}) = @error "not implemented"
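
For background, LSIF picks α ≥ 0 to minimize the least-squares criterion
(1/2)αᵀHα - hᵀα + λΣⱼαⱼ, where H ≈ E_de[φφᵀ] and h ≈ E_nu[φ] are the
constants assembled in `_lsif_consts`; the JuMP backend below encodes this
program verbatim. As an aside, the uLSIF variant swaps the ℓ₁ penalty for
a ridge penalty, which admits a closed form followed by clipping (a sketch,
not the package's solver):

    using LinearAlgebra

    # uLSIF-style shortcut: minimize (1/2)αᵀHα - hᵀα + (λ/2)αᵀα, then clip
    ulsif_coeffs(H, h, λ) = max.((H + λ * I) \ vec(h), 0.0)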
2 changes: 1 addition & 1 deletion src/lsif/jump.jl
@@ -11,7 +11,7 @@ function _lsif_coeffs(H, h, dre::LSIF, optlib::Type{JuMPLib})

  model = Model(optimizer_with_attributes(Ipopt.Optimizer, "print_level" => 0, "sb" => "yes"))
  @variable(model, α[1:b])
-  @objective(model, Min, (1/2) * dot(α, H*α - 2h) + λ * sum(α))
+  @objective(model, Min, (1 / 2) * dot(α, H * α - 2h) + λ * sum(α))
  @constraint(model, α .≥ 0)

  # solve the problem