diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 307fad215..8aba89644 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -37,7 +37,7 @@ Status `~/.julia/environments/v1.8/Project.toml`
 ## Utilities
 
 Finch include several scripts that can be executed directly, e.g. `runtests.jl`.
-These scripts are all have local [Pkg
+These scripts all have local [Pkg
 environments](https://pkgdocs.julialang.org/v1/getting-started/#Getting-Started-with-Environments).
 The scripts include convenience headers to automatically use their respective
 environments, so you won't need to worry about `--project=.` flags, etc.
diff --git a/docs/src/docs/internals/tensor_interface.md b/docs/src/docs/internals/tensor_interface.md
index f1530a310..b12073ff5 100644
--- a/docs/src/docs/internals/tensor_interface.md
+++ b/docs/src/docs/internals/tensor_interface.md
@@ -4,7 +4,7 @@ CurrentModule = Finch
 
 # Tensor Interface
 
-The `AbstractTensor` interface (defined in `src/abstract_tensor.jl`) is the interface through which Finch understands tensors. It is a high-level interace which allows tensors to interact with the rest of the Finch system. The interface is designed to be extensible, allowing users to define their own tensor types and behaviors. For a minimal example, read the definitions in [`/ext/SparseArraysExt.jl`](https://github.com/finch-tensor/Finch.jl/blob/main/ext/SparseArraysExt.jl) and in [`/src/interface/abstractarray.jl`](https://github.com/finch-tensor/Finch.jl/blob/main/src/interface/abstractarray.jl). Once these methods are defined that tell Finch how to generate code for an array, the `AbstractTensor` interface will also use Finch to generate code for several Julia `AbstractArray` methods, such as `getindex`, `setindex!`, `map`, and `reduce`. An important note: `getindex` and `setindex!` are not a source of truth for Finch tensors. Search the codebase for `::AbstractTensor` for a full list of methods that are implemented for `AbstractTensor`. Note than most `AbstractTensor` implement `labelled_show` and `labelled_children` methods instead of `show(::IO, ::MIME"text/plain", t::AbstractTensor)` for pretty printed display.
+The `AbstractTensor` interface (defined in `src/abstract_tensor.jl`) is the interface through which Finch understands tensors. It is a high-level interface which allows tensors to interact with the rest of the Finch system. The interface is designed to be extensible, allowing users to define their own tensor types and behaviors. For a minimal example, read the definitions in [`/ext/SparseArraysExt.jl`](https://github.com/finch-tensor/Finch.jl/blob/main/ext/SparseArraysExt.jl) and in [`/src/interface/abstractarray.jl`](https://github.com/finch-tensor/Finch.jl/blob/main/src/interface/abstractarray.jl). Once these methods are defined that tell Finch how to generate code for an array, the `AbstractTensor` interface will also use Finch to generate code for several Julia `AbstractArray` methods, such as `getindex`, `setindex!`, `map`, and `reduce`. An important note: `getindex` and `setindex!` are not a source of truth for Finch tensors. Search the codebase for `::AbstractTensor` for a full list of methods that are implemented for `AbstractTensor`. Note that most `AbstractTensor` subtypes implement `labelled_show` and `labelled_children` methods instead of `show(::IO, ::MIME"text/plain", t::AbstractTensor)` for pretty printed display.
 
 ## Tensor Methods
 
diff --git a/ext/SparseArraysExt.jl b/ext/SparseArraysExt.jl
index 320ff17b9..b8155ffd8 100644
--- a/ext/SparseArraysExt.jl
+++ b/ext/SparseArraysExt.jl
@@ -680,4 +680,18 @@ Finch.virtual_eltype(ctx, tns::VirtualSparseVector) = tns.Tv
 
 SparseArrays.nnz(fbr::Tensor) = countstored(fbr)
 
+function Base.:\(A::Tensor, B::Tensor)
+    As = SparseArrays.sparse(A)
+    Bs = SparseArrays.sparse(B)
+    return As \ Bs
 end
+function Base.:\(A::Tensor, b::Vector)
+    As = SparseArrays.sparse(A)
+    return As \ b
+end
+function Base.:\(a::Vector, B::Tensor)
+    Bs = SparseArrays.sparse(B)
+    return a \ Bs
+end
+
+end
\ No newline at end of file
diff --git a/src/interface/eager.jl b/src/interface/eager.jl
index d2bbed9c6..36ef4f3b4 100644
--- a/src/interface/eager.jl
+++ b/src/interface/eager.jl
@@ -142,6 +142,27 @@ Base.:-(x::AbstractTensor, y::AbstractTensor) = map(-, x, y)
 Base.:/(x::AbstractTensor, y::Number) = map(/, x, y)
 Base.:/(x::Number, y::AbstractTensor) = map(\, y, x)
 
+function Base.:\(A::AbstractTensor, b::AbstractTensor)
+    throw(FinchExtensionError(
+        "SparseArrays.jl must be loaded to do matrix division (\\) (hint: `using SparseArrays`)"
+    ))
+end
+function Base.:\(A::AbstractTensor, b::AbstractVector)
+    throw(FinchExtensionError(
+        "SparseArrays.jl must be loaded to do matrix division (\\) (hint: `using SparseArrays`)"
+    ))
+end
+function Base.:\(A::AbstractVector, b::AbstractTensor)
+    throw(FinchExtensionError(
+        "SparseArrays.jl must be loaded to do matrix division (\\) (hint: `using SparseArrays`)"
+    ))
+end
+function Base.:\(A::AbstractVector, b::AbstractVector)
+    throw(FinchExtensionError(
+        "SparseArrays.jl must be loaded to do matrix division (\\) (hint: `using SparseArrays`)"
+    ))
+end
+
 const AbstractTensorOrBroadcast = Union{
     <:AbstractTensor,<:Broadcasted{FinchStyle{N}} where {N}
 }
diff --git a/test/reference32/interface/sparsearrays_matdiv.txt b/test/reference32/interface/sparsearrays_matdiv.txt
new file mode 100644
index 000000000..eea56307d
--- /dev/null
+++ b/test/reference32/interface/sparsearrays_matdiv.txt
@@ -0,0 +1,36 @@
+julia> A = Tensor(CSCFormat(), [1.0 0.0; 0.0 1.0])
+2×2 Tensor{DenseLevel{Int32, SparseListLevel{Int32, Vector{Int32}, Vector{Int32}, ElementLevel{0.0, Float64, Int32, Vector{Float64}}}}}:
+ 1.0  0.0
+ 0.0  1.0
+julia> b = [1.0, 1.0]
+2-element Vector{Float64}:
+ 1.0
+ 1.0
+julia> A \ b
+2-element Vector{Float64}:
+ 1.0
+ 1.0
+julia> c = [1.0, 0.0]
+2-element Vector{Float64}:
+ 1.0
+ 0.0
+julia> D = Tensor(CSCFormat(), [1.0 1.0; 0.0 0.0])
+2×2 Tensor{DenseLevel{Int32, SparseListLevel{Int32, Vector{Int32}, Vector{Int32}, ElementLevel{0.0, Float64, Int32, Vector{Float64}}}}}:
+ 1.0  1.0
+ 0.0  0.0
+julia> c \ D
+1×2 transpose(::Vector{Float64}) with eltype Float64:
+ 1.0  1.0
+julia> E = Tensor(CSCFormat(), [1.0 0.0; 0.0 1.0])
+2×2 Tensor{DenseLevel{Int32, SparseListLevel{Int32, Vector{Int32}, Vector{Int32}, ElementLevel{0.0, Float64, Int32, Vector{Float64}}}}}:
+ 1.0  0.0
+ 0.0  1.0
+julia> F = Tensor(CSCFormat(), [1.0 1.0; 1.0 1.0])
+2×2 Tensor{DenseLevel{Int32, SparseListLevel{Int32, Vector{Int32}, Vector{Int32}, ElementLevel{0.0, Float64, Int32, Vector{Float64}}}}}:
+ 1.0  1.0
+ 1.0  1.0
+julia> E \ F
+2×2 SparseMatrixCSC{Float64, Int32} with 4 stored entries:
+ 1.0  1.0
+ 1.0  1.0
+
diff --git a/test/reference64/interface/sparsearrays_matdiv.txt b/test/reference64/interface/sparsearrays_matdiv.txt
new file mode 100644
index 000000000..225f455c4
--- /dev/null
+++ b/test/reference64/interface/sparsearrays_matdiv.txt
@@ -0,0 +1,36 @@
+julia> A = Tensor(CSCFormat(), [1.0 0.0; 0.0 1.0])
+2×2 Tensor{DenseLevel{Int64, SparseListLevel{Int64, Vector{Int64}, Vector{Int64}, ElementLevel{0.0, Float64, Int64, Vector{Float64}}}}}:
+ 1.0  0.0
+ 0.0  1.0
+julia> b = [1.0, 1.0]
+2-element Vector{Float64}:
+ 1.0
+ 1.0
+julia> A \ b
+2-element Vector{Float64}:
+ 1.0
+ 1.0
+julia> c = [1.0, 0.0]
+2-element Vector{Float64}:
+ 1.0
+ 0.0
+julia> D = Tensor(CSCFormat(), [1.0 1.0; 0.0 0.0])
+2×2 Tensor{DenseLevel{Int64, SparseListLevel{Int64, Vector{Int64}, Vector{Int64}, ElementLevel{0.0, Float64, Int64, Vector{Float64}}}}}:
+ 1.0  1.0
+ 0.0  0.0
+julia> c \ D
+1×2 transpose(::Vector{Float64}) with eltype Float64:
+ 1.0  1.0
+julia> E = Tensor(CSCFormat(), [1.0 0.0; 0.0 1.0])
+2×2 Tensor{DenseLevel{Int64, SparseListLevel{Int64, Vector{Int64}, Vector{Int64}, ElementLevel{0.0, Float64, Int64, Vector{Float64}}}}}:
+ 1.0  0.0
+ 0.0  1.0
+julia> F = Tensor(CSCFormat(), [1.0 1.0; 1.0 1.0])
+2×2 Tensor{DenseLevel{Int64, SparseListLevel{Int64, Vector{Int64}, Vector{Int64}, ElementLevel{0.0, Float64, Int64, Vector{Float64}}}}}:
+ 1.0  1.0
+ 1.0  1.0
+julia> E \ F
+2×2 SparseMatrixCSC{Float64, Int64} with 4 stored entries:
+ 1.0  1.0
+ 1.0  1.0
+
diff --git a/test/suites/interface_tests.jl b/test/suites/interface_tests.jl
index 193a7c074..5c340cc5e 100644
--- a/test/suites/interface_tests.jl
+++ b/test/suites/interface_tests.jl
@@ -1024,6 +1024,23 @@ end
             B = compute(expanddims(sum(lazy(A)); dims=1))
             @test size(B) == (1,)
         end
+
+        #https://github.com/finch-tensor/Finch.jl/issues/768
+        let
+            io = IOBuffer()
+
+            @repl io A = Tensor(CSCFormat(), [1.0 0.0; 0.0 1.0])
+            @repl io b = [1.0, 1.0]
+            @repl io A \ b
+            @repl io c = [1.0, 0.0]
+            @repl io D = Tensor(CSCFormat(), [1.0 1.0; 0.0 0.0])
+            @repl io c \ D
+            @repl io E = Tensor(CSCFormat(), [1.0 0.0; 0.0 1.0])
+            @repl io F = Tensor(CSCFormat(), [1.0 1.0; 1.0 1.0])
+            @repl io E \ F
+
+            @test check_output("interface/sparsearrays_matdiv.txt", String(take!(io)))
+        end
    end
end
end
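
Usage note (not part of the patch): the following is a minimal sketch of what this change enables, assuming the patch is applied and both Finch and SparseArrays are loaded so the SparseArraysExt extension is active. The matrix and right-hand-side values below are illustrative and are not taken from the reference files above.

    # Sketch: left division on a Finch CSC tensor, delegated to SparseArrays.
    using Finch, SparseArrays

    A = Tensor(CSCFormat(), [2.0 0.0; 0.0 4.0])  # sparse 2×2 system matrix
    b = [2.0, 8.0]                                # dense right-hand side

    # The extension converts A via SparseArrays.sparse(A) and forwards the
    # solve, so x satisfies A * x == b (here x == [1.0, 2.0]).
    x = A \ b

    # Without `using SparseArrays`, the new fallbacks in src/interface/eager.jl
    # throw a FinchExtensionError pointing at the missing extension instead of
    # falling through to a confusing MethodError or dense solve.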