diff --git a/src/NeuralNets.jl b/src/NeuralNets.jl
index 9fb0060..8b6d9d3 100644
--- a/src/NeuralNets.jl
+++ b/src/NeuralNets.jl
@@ -4,7 +4,7 @@
 using Optim
 using ArrayViews
 import Optim:levenberg_marquardt
-import Base: show
+importall Base
 
 
 # functions
diff --git a/src/activations.jl b/src/activations.jl
index b14c469..2b5724f 100644
--- a/src/activations.jl
+++ b/src/activations.jl
@@ -14,13 +14,13 @@ identd(x) = 1
 tanhd(x) = sech(x).^2
 
 # dictionary of commonly-used activation derivatives
-derivs = Dict{Function, Function}([
+derivs = Dict{Function, Function}((
     logis => logisd,
     logissafe => logissafed,
     relu => relud,
     ident => identd,
     tanh => tanhd
-    ])
+    ))
 
 # automatic differentiateion with ForwardDiff.jl
 # due to limitations of ForwardDiff.jl, this function
@@ -29,4 +29,4 @@ function autodiff(activ::Function)
     f(x) = activ(x[1])
     forwarddiff_derivative(x::Float64) = forwarddiff_gradient(f,Float64)([x])[1]
     return forwarddiff_derivative
-end
\ No newline at end of file
+end