From 999bc11d2e438e77de5bba98edfbac095b26d497 Mon Sep 17 00:00:00 2001
From: Colin Caine
Date: Mon, 13 May 2019 21:07:31 +0100
Subject: [PATCH] Update examples/crossval.jl

Begone, bitrot.
---
 examples/crossval.jl | 13 ++++++-------
 1 file changed, 6 insertions(+), 7 deletions(-)

diff --git a/examples/crossval.jl b/examples/crossval.jl
index d93ba5f..77984aa 100644
--- a/examples/crossval.jl
+++ b/examples/crossval.jl
@@ -5,19 +5,20 @@
 # of the RMSE (root-mean-square-error) evaluated on the testing set
 #
 
+using Printf: @printf
 using MLBase
 
 # functions
 
 function compute_center(X::Matrix{Float64})
-    c = vec(mean(X, 2))
+    c = vec(mean(X; dims=2))
     @printf("training on %d samples => (%.4f, %.4f)\n",
             size(X,2), c[1], c[2])
     return c
 end
 
 function compute_rmse(c::Vector{Float64}, X::Matrix{Float64})
-    v = sqrt(mean(sum(abs2(X .- c),1)))
+    v = sqrt(mean(sum(abs2.(X .- c); dims=1)))
     @printf("RMSE on test set: %.6f\n\n", v)
     return v
 end
@@ -29,14 +30,12 @@ const data = [2., 3.] .+ randn(2, n)
 
 # cross validation
 
-(c, v, inds) = cross_validate(
+scores = cross_validate(
     inds -> compute_center(data[:, inds]),        # training function
     (c, inds) -> compute_rmse(c, data[:, inds]),  # evaluation function
     n,              # total number of samples
-    Kfold(n, 5),    # cross validation plan: 5-fold
-    Reverse)        # smaller score indicates better model
+    Kfold(n, 5))    # cross validation plan: 5-fold
 
 # display results
 
-@printf("best model = (%.4f, %.4f), score = %.6f\n", c[1], c[2], v)
-
+@show scores