Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 16 additions & 4 deletions .buildkite/pipeline.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,16 @@ steps:
- "test/"
- "Project.toml"
- ".buildkite/testing.yml"
- "lib/"
- "lib/LuxTestUtils/src/"
- "lib/LuxTestUtils/ext/"
- "lib/LuxCore/src/"
- "lib/LuxCore/ext/"
- "lib/MLDataDevices/src/"
- "lib/MLDataDevices/ext/"
- "lib/WeightInitializers/src/"
- "lib/WeightInitializers/ext/"
- "lib/LuxLib/src/"
- "lib/LuxLib/ext/"
config:
command: "buildkite-agent pipeline upload .buildkite/testing.yml"
agents:
Expand Down Expand Up @@ -52,9 +61,12 @@ steps:
path:
- "lib/LuxLib/"
- ".buildkite/testing_luxlib.yml"
- "lib/LuxTestUtils/"
- "lib/LuxCore/"
- "lib/MLDataDevices/"
- "lib/LuxTestUtils/src/"
- "lib/LuxTestUtils/ext/"
- "lib/LuxCore/src/"
- "lib/LuxCore/ext/"
- "lib/MLDataDevices/src/"
- "lib/MLDataDevices/ext/"
config:
command: "buildkite-agent pipeline upload .buildkite/testing_luxlib.yml"
agents:
Expand Down
8 changes: 4 additions & 4 deletions .buildkite/testing_mldatadevices.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ steps:
version: "{{matrix.julia}}"
- JuliaCI/julia-test#v1:
project: "lib/MLDataDevices"
test_args: "BACKEND_GROUP={{matrix.group}}"
test_args: "--BACKEND_GROUP={{matrix.group}}"
- JuliaCI/julia-coverage#v1:
codecov: true
dirs:
Expand All @@ -34,7 +34,7 @@ steps:
# version: "{{matrix.julia}}"
# - JuliaCI/julia-test#v1:
# project: "lib/MLDataDevices"
# test_args: "BACKEND_GROUP=AMDGPU"
# test_args: "--BACKEND_GROUP=AMDGPU"
# - JuliaCI/julia-coverage#v1:
# codecov: true
# dirs:
Expand All @@ -59,7 +59,7 @@ steps:
version: "{{matrix.julia}}"
- JuliaCI/julia-test#v1:
project: "lib/MLDataDevices"
test_args: "BACKEND_GROUP=Metal"
test_args: "--BACKEND_GROUP=Metal"
- JuliaCI/julia-coverage#v1:
codecov: true
dirs:
Expand All @@ -85,7 +85,7 @@ steps:
version: "{{matrix.julia}}"
- JuliaCI/julia-test#v1:
project: "lib/MLDataDevices"
test_args: "BACKEND_GROUP=oneAPI"
test_args: "--BACKEND_GROUP=oneAPI"
- JuliaCI/julia-coverage#v1:
codecov: true
dirs:
Expand Down
8 changes: 4 additions & 4 deletions .buildkite/testing_weightinitializers.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ steps:
version: "{{matrix.julia}}"
- JuliaCI/julia-test#v1:
project: "lib/WeightInitializers"
test_args: "BACKEND_GROUP=CUDA"
test_args: "--BACKEND_GROUP=CUDA"
- JuliaCI/julia-coverage#v1:
codecov: true
dirs:
Expand All @@ -31,7 +31,7 @@ steps:
# version: "{{matrix.julia}}"
# - JuliaCI/julia-test#v1:
# project: "lib/WeightInitializers"
# test_args: "BACKEND_GROUP=AMDGPU"
# test_args: "--BACKEND_GROUP=AMDGPU"
# - JuliaCI/julia-coverage#v1:
# codecov: true
# dirs:
Expand All @@ -56,7 +56,7 @@ steps:
version: "{{matrix.julia}}"
- JuliaCI/julia-test#v1:
project: "lib/WeightInitializers"
test_args: "BACKEND_GROUP=Metal"
test_args: "--BACKEND_GROUP=Metal"
- JuliaCI/julia-coverage#v1:
codecov: true
dirs:
Expand All @@ -82,7 +82,7 @@ steps:
version: "{{matrix.julia}}"
- JuliaCI/julia-test#v1:
project: "lib/WeightInitializers"
test_args: "BACKEND_GROUP=oneAPI"
test_args: "--BACKEND_GROUP=oneAPI"
- JuliaCI/julia-coverage#v1:
codecov: true
dirs:
Expand Down
15 changes: 10 additions & 5 deletions .github/workflows/CI.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,11 +10,16 @@ on:
- "test/**"
- "Project.toml"
- ".github/workflows/CI.yml"
- "lib/LuxTestUtils/**"
- "lib/LuxCore/**"
- "lib/MLDataDevices/**"
- "lib/WeightInitializers/**"
- "lib/LuxLib/**"
- "lib/LuxTestUtils/src/**"
- "lib/LuxTestUtils/ext/**"
- "lib/LuxCore/src/**"
- "lib/LuxCore/ext/**"
- "lib/MLDataDevices/src/**"
- "lib/MLDataDevices/ext/**"
- "lib/WeightInitializers/src/**"
- "lib/WeightInitializers/ext/**"
- "lib/LuxLib/src/**"
- "lib/LuxLib/ext/**"
push:
branches:
- main
Expand Down
15 changes: 10 additions & 5 deletions .github/workflows/CIPreRelease.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,11 +10,16 @@
# - "test/**"
# - "Project.toml"
# - ".github/workflows/CI.yml"
# - "lib/LuxTestUtils/**"
# - "lib/LuxCore/**"
# - "lib/MLDataDevices/**"
# - "lib/WeightInitializers/**"
# - "lib/LuxLib/**"
# - "lib/LuxTestUtils/src/**"
# - "lib/LuxTestUtils/ext/**"
# - "lib/LuxCore/src/**"
# - "lib/LuxCore/ext/**"
# - "lib/MLDataDevices/src/**"
# - "lib/MLDataDevices/ext/**"
# - "lib/WeightInitializers/src/**"
# - "lib/WeightInitializers/ext/**"
# - "lib/LuxLib/src/**"
# - "lib/LuxLib/ext/**"
# push:
# branches:
# - main
Expand Down
3 changes: 2 additions & 1 deletion .github/workflows/CI_LuxCore.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,8 @@ on:
- ".github/workflows/CommonCI.yml"
- "lib/LuxCore/**"
- ".github/workflows/CI_LuxCore.yml"
- "lib/MLDataDevices/**"
- "lib/MLDataDevices/src/**"
- "lib/MLDataDevices/ext/**"
push:
branches:
- main
Expand Down
9 changes: 6 additions & 3 deletions .github/workflows/CI_LuxLib.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,12 @@ on:
- ".github/workflows/CommonCI.yml"
- "lib/LuxLib/**"
- ".github/workflows/CI_LuxLib.yml"
- "lib/LuxTestUtils/**"
- "lib/LuxCore/**"
- "lib/MLDataDevices/**"
- "lib/LuxTestUtils/src/**"
- "lib/LuxTestUtils/ext/**"
- "lib/LuxCore/src/**"
- "lib/LuxCore/ext/**"
- "lib/MLDataDevices/src/**"
- "lib/MLDataDevices/ext/**"
push:
branches:
- main
Expand Down
9 changes: 7 additions & 2 deletions .github/workflows/CI_MLDataDevices.yml
Original file line number Diff line number Diff line change
Expand Up @@ -28,11 +28,16 @@ jobs:
- cpu
- opencl
- reactant
exclude:
- os: windows-latest
group: opencl
- os: macos-latest
group: opencl
uses: ./.github/workflows/CommonCI.yml
with:
julia_version: "1.12"
project: "lib/MLDataDevices"
test_args: "BACKEND_GROUP=${{ matrix.group }}"
test_args: "--BACKEND_GROUP=${{ matrix.group }}"
os: ${{ matrix.os }}

downgrade:
Expand All @@ -48,4 +53,4 @@ jobs:
julia_version: "1.11"
project: "lib/MLDataDevices"
downgrade_testing: true
test_args: "BACKEND_GROUP=${{ matrix.group }}"
test_args: "--BACKEND_GROUP=${{ matrix.group }}"
4 changes: 2 additions & 2 deletions .github/workflows/CI_WeightInitializers.yml
Original file line number Diff line number Diff line change
Expand Up @@ -21,12 +21,12 @@ jobs:
with:
julia_version: "1.12"
project: "lib/WeightInitializers"
test_args: "BACKEND_GROUP=cpu"
test_args: "--BACKEND_GROUP=cpu"

downgrade:
uses: ./.github/workflows/CommonCI.yml
with:
julia_version: "1.11"
project: "lib/WeightInitializers"
downgrade_testing: true
test_args: "BACKEND_GROUP=cpu"
test_args: "--BACKEND_GROUP=cpu"
10 changes: 6 additions & 4 deletions lib/LuxCore/test/Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,21 +6,23 @@ Functors = "d9f16b24-f501-4c13-a1f2-28368ffc5196"
LuxCore = "bb33d45b-7691-41d6-9220-0943567d0623"
MLDataDevices = "7e8f7934-dd98-4c1a-8fe8-92b47a384d40"
Optimisers = "3bd65402-5787-11e9-1adc-39752487f4e2"
ParallelTestRunner = "d3525ed8-44d0-4b2c-a655-542cee43accc"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
Setfield = "efcf1570-3423-57d1-acb7-fd33fddbac46"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

[sources]
LuxCore = {path = ".."}
MLDataDevices = {path = "../../MLDataDevices"}

[compat]
Aqua = "0.8.7"
EnzymeCore = "0.8.14"
ExplicitImports = "1.9.0"
Functors = "0.5"
MLDataDevices = "1.17"
Optimisers = "0.3.4, 0.4"
ParallelTestRunner = "2.1"
Random = "1.10"
Setfield = "1.1"
Test = "1.10"

[sources]
LuxCore = {path = ".."}
MLDataDevices = {path = "../../MLDataDevices"}
44 changes: 44 additions & 0 deletions lib/LuxCore/test/abstractluxcontainerlayer.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
# Tests for the `AbstractLuxContainerLayer` interface: containers whose
# sub-layers are grouped under declared field names (here `:layers`), with
# parameter/state namespacing, aggregated lengths, and `apply` semantics.
using LuxCore, Test, Random

# RNG used for both layer setup and input generation.
rng = LuxCore.Internal.default_rng()

# `common.jl` provides the test fixtures (`Chain`, `Chain2`, `Dense`, ...).
include("common.jl")

@testset "AbstractLuxContainerLayer Interface" begin
    # Container storing its sub-layers in a NamedTuple under the `:layers` field.
    model = Chain((; layer_1=Dense(5, 5), layer_2=Dense(5, 6)))
    x = randn(rng, Float32, 5)
    ps, st = LuxCore.setup(rng, model)

    # Parameters and states of a container are namespaced by its field names.
    @test fieldnames(typeof(ps)) == (:layers,)
    @test fieldnames(typeof(st)) == (:layers,)

    # Lengths must aggregate over the contained sub-layers.
    @test LuxCore.parameterlength(ps) ==
        LuxCore.parameterlength(model) ==
        LuxCore.parameterlength(model.layers[1]) + LuxCore.parameterlength(model.layers[2])
    @test LuxCore.statelength(st) ==
        LuxCore.statelength(model) ==
        LuxCore.statelength(model.layers[1]) + LuxCore.statelength(model.layers[2])

    # `apply` must agree with calling the layer object directly.
    @test LuxCore.apply(model, x, ps, st) == model(x, ps, st)

    # `stateless_apply` returns only the output, dropping the returned state.
    @test LuxCore.stateless_apply(model, x, ps) == first(LuxCore.apply(model, x, ps, st))

    # Pretty-printing the container must not throw.
    @test_nowarn println(model)

    # Same interface checks for a container with sub-layers in separate fields.
    model = Chain2(Dense(5, 5), Dense(5, 6))
    x = randn(rng, Float32, 5)
    ps, st = LuxCore.setup(rng, model)

    @test LuxCore.parameterlength(ps) ==
        LuxCore.parameterlength(model) ==
        LuxCore.parameterlength(model.layer1) + LuxCore.parameterlength(model.layer2)
    @test LuxCore.statelength(st) ==
        LuxCore.statelength(model) ==
        LuxCore.statelength(model.layer1) + LuxCore.statelength(model.layer2)

    @test LuxCore.apply(model, x, ps, st) == model(x, ps, st)

    @test LuxCore.stateless_apply(model, x, ps) == first(LuxCore.apply(model, x, ps, st))

    @test_nowarn println(model)
end
61 changes: 61 additions & 0 deletions lib/LuxCore/test/abstractluxlayer.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
# Tests for the core `AbstractLuxLayer` interface: setup/apply semantics,
# parameter/state length queries, and the default fallbacks of
# `initialparameters`/`initialstates` for non-layer leaves.
using LuxCore, Test, Random, Functors

# RNG used for both layer setup and input generation.
rng = LuxCore.Internal.default_rng()

# `common.jl` provides the fixtures (`Dense`, `DenseWrapper`, `DenseWrapper2`, ...).
include("common.jl")

@testset "AbstractLuxLayer Interface" begin
    @testset "Custom Layer" begin
        model = Dense(5, 6)
        x = randn(rng, Float32, 5)
        ps, st = LuxCore.setup(rng, model)

        @test LuxCore.parameterlength(ps) == LuxCore.parameterlength(model)
        @test LuxCore.statelength(st) == LuxCore.statelength(model)

        # `apply` must agree with calling the layer object directly.
        @test LuxCore.apply(model, x, ps, st) == model(x, ps, st)

        # `stateless_apply` runs with an empty state and drops the returned state.
        @test LuxCore.stateless_apply(model, x, ps) ==
            first(LuxCore.apply(model, x, ps, NamedTuple()))

        # Pretty-printing must not throw.
        @test_nowarn println(model)

        # Wrapper layers must be transparent w.r.t. parameters, states, outputs.
        @testset for wrapper in (DenseWrapper, DenseWrapper2)
            model2 = wrapper(model)
            ps, st = LuxCore.setup(rng, model2)

            @test LuxCore.parameterlength(ps) == LuxCore.parameterlength(model2)
            @test LuxCore.statelength(st) == LuxCore.statelength(model2)

            @test model2(x, ps, st)[1] == model(x, ps, st)[1]

            @test_nowarn println(model2)
        end
    end

    @testset "Default Fallbacks" begin
        struct NoParamStateLayer <: AbstractLuxLayer end

        # A layer without fields defaults to empty parameters and states.
        layer = NoParamStateLayer()
        @test LuxCore.initialparameters(rng, layer) == NamedTuple()
        @test LuxCore.initialstates(rng, layer) == NamedTuple()

        # Arrays contribute their length; `Val` counts as a single leaf.
        @test LuxCore.parameterlength(zeros(10, 2)) == 20
        @test LuxCore.statelength(zeros(10, 2)) == 20
        @test LuxCore.statelength(Val(true)) == 1
        @test LuxCore.statelength((zeros(10), zeros(5, 2))) == 20
        @test LuxCore.statelength((layer_1=zeros(10), layer_2=zeros(5, 2))) == 20

        # `initialparameters` fallbacks: empty NamedTuple / `nothing` map to
        # empty parameters; a bare tuple of values is a MethodError; containers
        # of layers map elementwise.
        @test LuxCore.initialparameters(rng, NamedTuple()) == NamedTuple()
        @test_throws MethodError LuxCore.initialparameters(rng, ())
        @test LuxCore.initialparameters(rng, nothing) == NamedTuple()
        @test LuxCore.initialparameters(rng, (nothing, layer)) ==
            (NamedTuple(), NamedTuple())

        # `initialstates` fallbacks mirror the `initialparameters` group above.
        @test LuxCore.initialstates(rng, NamedTuple()) == NamedTuple()
        @test_throws MethodError LuxCore.initialstates(rng, ())
        @test LuxCore.initialstates(rng, nothing) == NamedTuple()
        # Fixed copy-paste slip: the original line re-tested
        # `initialparameters` here, byte-duplicating the assertion above and
        # leaving the container fallback of `initialstates` untested.
        @test LuxCore.initialstates(rng, (nothing, layer)) ==
            (NamedTuple(), NamedTuple())
    end
end
29 changes: 29 additions & 0 deletions lib/LuxCore/test/abstractluxwrapperlayer.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
# Tests for the `AbstractLuxWrapperLayer` interface: a wrapper forwards its
# wrapped layer's parameters/states without adding its own namespace level.
using LuxCore, Test, Random

# RNG used for both layer setup and input generation.
rng = LuxCore.Internal.default_rng()

# `common.jl` provides the test fixtures (`ChainWrapper`, `Dense`, ...).
include("common.jl")

@testset "AbstractLuxWrapperLayer Interface" begin
    model = ChainWrapper((; layer_1=Dense(5, 10), layer_2=Dense(10, 5)))
    x = randn(rng, Float32, 5)
    ps, st = LuxCore.setup(rng, model)

    # Unlike a container layer, the wrapper introduces no extra `:layers`
    # level: ps/st expose the inner layers' field names directly.
    @test fieldnames(typeof(ps)) == (:layer_1, :layer_2)
    @test fieldnames(typeof(st)) == (:layer_1, :layer_2)

    # Lengths must aggregate over the wrapped sub-layers.
    @test LuxCore.parameterlength(ps) ==
        LuxCore.parameterlength(model) ==
        LuxCore.parameterlength(model.layers.layer_1) +
        LuxCore.parameterlength(model.layers.layer_2)
    @test LuxCore.statelength(st) ==
        LuxCore.statelength(model) ==
        LuxCore.statelength(model.layers.layer_1) +
        LuxCore.statelength(model.layers.layer_2)

    # `apply` must agree with calling the layer object directly.
    @test LuxCore.apply(model, x, ps, st) == model(x, ps, st)

    # `stateless_apply` returns only the output, dropping the returned state.
    @test LuxCore.stateless_apply(model, x, ps) == first(LuxCore.apply(model, x, ps, st))

    # Pretty-printing must not throw.
    @test_nowarn println(model)
end
Loading
Loading