diff --git a/.travis.yml b/.travis.yml
index ffa7f08..ced673a 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -3,13 +3,13 @@ os:
   - linux
   - osx
 julia:
-  - 0.3
   - 0.4
+  - 0.5
   - nightly
 notifications:
   email: false
-script:
-  - if [[ -a .git/shallow ]]; then git fetch --unshallow; fi
-  - julia --check-bounds=yes -e 'Pkg.clone(pwd()); Pkg.build("RegERMs"); Pkg.test("RegERMs"; coverage=true)'
+# script:
+#   - if [[ -a .git/shallow ]]; then git fetch --unshallow; fi
+#   - julia --check-bounds=yes -e 'Pkg.clone(pwd()); Pkg.build("RegERMs"); Pkg.test("RegERMs"; coverage=true)'
 after_success:
   - julia -e 'cd(Pkg.dir("RegERMs")); Pkg.add("Coverage"); using Coverage; Coveralls.submit(Coveralls.process_folder())'
diff --git a/Makefile b/Makefile
deleted file mode 100644
index be02bbc..0000000
--- a/Makefile
+++ /dev/null
@@ -1,3 +0,0 @@
-.PHONY: test
-test:
-	julia ./test/runtests.jl
diff --git a/REQUIRE b/REQUIRE
index dd9971e..4051e8e 100644
--- a/REQUIRE
+++ b/REQUIRE
@@ -1,3 +1,3 @@
-julia 0.3
+julia 0.4
 StatsBase 0.6.0
 Optim 0.3.1
diff --git a/src/RegERMs.jl b/src/RegERMs.jl
index 21dc523..ad2d9bb 100644
--- a/src/RegERMs.jl
+++ b/src/RegERMs.jl
@@ -1,3 +1,5 @@
+VERSION >= v"0.4.0-dev+6521" && __precompile__()
+
 module RegERMs
 
 using StatsBase
diff --git a/src/solvers/lbfgs.jl b/src/solvers/lbfgs.jl
index 71e72d6..66b9244 100644
--- a/src/solvers/lbfgs.jl
+++ b/src/solvers/lbfgs.jl
@@ -2,24 +2,27 @@ type LBFGSSolver <: RegressionSolver end
 
 function solve(model::RegressionModel, method::RegERM, ::LBFGSSolver, X::AbstractMatrix, y::AbstractVector)
     function tloss_grad(theta::Vector)
-        n = size(X,1)
+        n = size(X,1)
 
-        grad_model = gradient(model.f, X[1,:], theta)
-        grad_loss = derivs(loss(method), values(model, X[1,:], theta), [y[1]])
+        grad_model = gradient(model.f, X[1:1,:], theta)
+        grad_loss = derivs(loss(method), values(model, X[1:1,:], theta), [y[1]])
 
-        total = broadcast(*, grad_loss',grad_model)
-        for i = 2:n
-            grad_model = gradient(model.f, X[i,:], theta)
-            grad_loss = derivs(loss(method), values(model, X[i,:], theta), [y[i]])
+        total = broadcast(*, grad_loss',grad_model)
+        for i = 2:n
+            grad_model = gradient(model.f, X[i:i,:], theta)
+            grad_loss = derivs(loss(method), values(model, X[i:i,:], theta), [y[i]])
 
-            total += broadcast(*, grad_loss',grad_model)
-        end
-        return vec(total)
+            total += broadcast(*, grad_loss', grad_model)
+        end
+        return vec(total)
     end
     reg_grad(theta::Vector) = gradient(regularizer(method, theta))
 
-    obj(theta::Vector) = tloss(loss(method), values(model, method.X, theta), method.y) + value(regularizer(method, theta))
+    obj(theta::Vector) =
+        tloss(loss(method), values(model, method.X, theta), method.y) + value(regularizer(method, theta))
     grad!(theta::Vector, storage::Vector) = storage[:] = tloss_grad(theta) + reg_grad(theta)
 
-    Optim.optimize(obj, grad!, model.theta, method=:l_bfgs, linesearch! = Optim.interpolating_linesearch!).minimum
-end
\ No newline at end of file
+    Optim.optimize(obj, grad!,
+        model.theta,
+        method=LBFGS(; linesearch! = Optim.interpolating_linesearch!)).minimum
+end
diff --git a/test/runtests.jl b/test/runtests.jl
index 8a7b1a8..39c74ca 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -1,5 +1,3 @@
-push!(LOAD_PATH, "src")
-
 using RegERMs, Base.Test
 
 
@@ -18,4 +16,4 @@ for t in tests
     test_fn = "$t.jl"
     println("* $test_fn")
     include(test_fn)
-end
\ No newline at end of file
+end
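
Note on the X[i,:] -> X[i:i,:] changes in src/solvers/lbfgs.jl: Julia 0.5 changed scalar indexing of array dimensions to drop those dimensions, so X[i,:] returns a 1-D Vector instead of the 1xn row matrix it returned on 0.4. Slicing with the unit range i:i keeps the row-matrix shape on both versions, which is what the gradient code expects. A minimal sketch of the difference (the matrix here is illustrative, not from the package):

    julia> X = [1 2 3; 4 5 6]   # 2x3 Matrix{Int}
    julia> X[1, :]              # 0.4: 1x3 row matrix; 0.5: 3-element Vector
    julia> X[1:1, :]            # 1x3 row matrix on both 0.4 and 0.5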