Browse Source

Move SparseMatrixCSR support to an extension

pull/24/head
Fredrik Ekre 1 year ago
parent
commit
ff450f429a
No known key found for this signature in database
GPG Key ID: DE82E6D5E364C0A2
  1. 7
      .github/workflows/Check.yml
  2. 3
      CHANGELOG.md
  3. 7
      Project.toml
  4. 4
      ext/HYPREPartitionedArrays.jl
  5. 80
      ext/HYPRESparseMatricesCSR.jl
  6. 48
      src/HYPRE.jl
  7. 15
      src/solvers.jl
  8. 18
      test/runtests.jl

7
.github/workflows/Check.yml

@ -33,19 +33,20 @@ jobs:
]) ])
Pkg.add([ Pkg.add([
PackageSpec(name = "ExplicitImports", version = "1.9"), PackageSpec(name = "ExplicitImports", version = "1.9"),
PackageSpec(name = "PartitionedArrays", version = "0.5"), PackageSpec(name = "PartitionedArrays"),
PackageSpec(name = "SparseMatricesCSR"),
]) ])
- name: ExplicitImports.jl code checks - name: ExplicitImports.jl code checks
shell: julia --project=@explicit-imports {0} shell: julia --project=@explicit-imports {0}
run: | run: |
using HYPRE, ExplicitImports, PartitionedArrays using HYPRE, ExplicitImports, PartitionedArrays, SparseMatricesCSR
# Check HYPRE # Check HYPRE
check_no_implicit_imports(HYPRE) check_no_implicit_imports(HYPRE)
check_no_stale_explicit_imports(HYPRE) check_no_stale_explicit_imports(HYPRE)
check_all_qualified_accesses_via_owners(HYPRE) check_all_qualified_accesses_via_owners(HYPRE)
check_no_self_qualified_accesses(HYPRE) check_no_self_qualified_accesses(HYPRE)
# Check extension modules # Check extension modules
for ext in (:HYPREPartitionedArrays,) for ext in (:HYPREPartitionedArrays, :HYPRESparseMatricesCSR)
extmod = Base.get_extension(HYPRE, ext) extmod = Base.get_extension(HYPRE, ext)
if extmod !== nothing if extmod !== nothing
check_no_implicit_imports(extmod) check_no_implicit_imports(extmod)

3
CHANGELOG.md

@ -9,9 +9,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Changed ### Changed
- PartitionedArrays.jl dependency upgraded from release series 0.3.x to release series - PartitionedArrays.jl dependency upgraded from release series 0.3.x to release series
0.5.x. ([#17], [#18]) 0.5.x. ([#17], [#18])
- PartitionedArrays.jl support is now moved to a package extension. ([#23])
- CEnum.jl dependency upgraded to release series 0.5.x (release series 0.4.x still - CEnum.jl dependency upgraded to release series 0.5.x (release series 0.4.x still
allowed). ([#17], [#18]) allowed). ([#17], [#18])
- PartitionedArrays.jl support is now moved to a package extension. ([#23])
- SparseMatricesCSR.jl support is now moved to a package extension. ([#24])
## [v1.5.0] - 2023-05-26 ## [v1.5.0] - 2023-05-26
### Changed ### Changed

7
Project.toml

@ -13,9 +13,11 @@ SparseMatricesCSR = "a0a7dd2c-ebf4-11e9-1f05-cf50bc540ca1"
[weakdeps] [weakdeps]
PartitionedArrays = "5a9dfac6-5c52-46f7-8278-5e2210713be9" PartitionedArrays = "5a9dfac6-5c52-46f7-8278-5e2210713be9"
SparseMatricesCSR = "a0a7dd2c-ebf4-11e9-1f05-cf50bc540ca1"
[extensions] [extensions]
HYPREPartitionedArrays = "PartitionedArrays" HYPREPartitionedArrays = ["PartitionedArrays", "SparseMatricesCSR"]
HYPRESparseMatricesCSR = "SparseMatricesCSR"
[compat] [compat]
CEnum = "0.4, 0.5" CEnum = "0.4, 0.5"
@ -27,7 +29,8 @@ julia = "1.6"
[extras] [extras]
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
PartitionedArrays = "5a9dfac6-5c52-46f7-8278-5e2210713be9" PartitionedArrays = "5a9dfac6-5c52-46f7-8278-5e2210713be9"
SparseMatricesCSR = "a0a7dd2c-ebf4-11e9-1f05-cf50bc540ca1"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
[targets] [targets]
test = ["LinearAlgebra", "PartitionedArrays", "Test"] test = ["LinearAlgebra", "PartitionedArrays", "SparseMatricesCSR", "Test"]

4
ext/HYPREPartitionedArrays.jl

@ -184,7 +184,7 @@ function Internals.get_proc_rows(A::Union{PSparseMatrix, PVector})
return ilower, iupper return ilower, iupper
end end
function HYPREMatrix(B::PSparseMatrix) function HYPRE.HYPREMatrix(B::PSparseMatrix)
# Use the same communicator as the matrix # Use the same communicator as the matrix
comm = Internals.get_comm(B) comm = Internals.get_comm(B)
# Fetch rows owned by this process # Fetch rows owned by this process
@ -206,7 +206,7 @@ end
# PartitionedArrays.PVector -> HYPREVector # # PartitionedArrays.PVector -> HYPREVector #
############################################ ############################################
function HYPREVector(v::PVector) function HYPRE.HYPREVector(v::PVector)
# Use the same communicator as the matrix # Use the same communicator as the matrix
comm = Internals.get_comm(v) comm = Internals.get_comm(v)
# Fetch rows owned by this process # Fetch rows owned by this process

80
ext/HYPRESparseMatricesCSR.jl

@ -0,0 +1,80 @@
module HYPRESparseMatricesCSR

using HYPRE.LibHYPRE: @check, HYPRE_BigInt, HYPRE_Complex, HYPRE_Int
using HYPRE: HYPRE, HYPREMatrix, HYPRESolver, HYPREVector, HYPRE_IJMatrixSetValues, Internals
using MPI: MPI
using SparseArrays: SparseArrays, nonzeros, nzrange
using SparseMatricesCSR: SparseMatrixCSR, colvals

##################################
# SparseMatrixCSR -> HYPREMatrix #
##################################

# Convert the CSR matrix `A`, covering the row block `ilower:iupper`, into the
# `(nrows, ncols, rows, cols, values)` buffers that `HYPRE_IJMatrixSetValues`
# expects. Column indices and values are emitted in the matrix' storage order.
function Internals.to_hypre_data(A::SparseMatrixCSR, ilower, iupper)
    Internals.check_n_rows(A, ilower, iupper)
    nnz = SparseArrays.nnz(A)
    csr_cols = colvals(A)
    csr_vals = nonzeros(A)
    # Allocate the buffers in the layout HYPRE wants
    nrows = HYPRE_Int(iupper - ilower + 1)       # Total number of rows
    ncols = Vector{HYPRE_Int}(undef, nrows)      # Number of columns for each row
    rows = collect(HYPRE_BigInt, ilower:iupper)  # The row indices
    cols = Vector{HYPRE_BigInt}(undef, nnz)      # The column indices
    values = Vector{HYPRE_Complex}(undef, nnz)   # The values
    # Walk the rows and copy column indices and values sequentially;
    # for CSR storage this visits the nonzeros in order 1:nnz.
    idx = 0
    @inbounds for row in 1:size(A, 1)
        rng = nzrange(A, row)
        ncols[row] = length(rng)
        for p in rng
            idx += 1
            cols[idx] = csr_cols[p]
            values[idx] = csr_vals[p]
        end
    end
    # Internal invariants: all nonzeros consumed, buffer lengths consistent
    @assert idx == nnz
    @assert length(ncols) == length(rows) == nrows
    return nrows, ncols, rows, cols, values
end

# Construct a HYPREMatrix over `comm` from the rows `ilower:iupper` of `B`.
# Note: keep in sync with the SparseMatrixCSC method
function HYPRE.HYPREMatrix(comm::MPI.Comm, B::SparseMatrixCSR, ilower, iupper)
    A = HYPREMatrix(comm, ilower, iupper)
    setvalues_args = Internals.to_hypre_data(B, ilower, iupper)
    @check HYPRE_IJMatrixSetValues(A, setvalues_args...)
    Internals.assemble_matrix(A)
    return A
end

# Single-process convenience constructor (uses MPI.COMM_SELF).
# Note: keep in sync with the SparseMatrixCSC method
function HYPRE.HYPREMatrix(B::SparseMatrixCSR, ilower = 1, iupper = size(B, 1))
    return HYPREMatrix(MPI.COMM_SELF, B, ilower, iupper)
end

####################################
# SparseMatrixCSR solver interface #
####################################

# Solve A x = b and return the solution as a freshly allocated Vector.
# Note: keep in sync with the SparseMatrixCSC method
function HYPRE.solve(solver::HYPRESolver, A::SparseMatrixCSR, b::Vector)
    hypre_sol = HYPRE.solve(solver, HYPREMatrix(A), HYPREVector(b))
    return copy!(similar(b, HYPRE_Complex), hypre_sol)
end

# Solve A x = b in place, using `x` as the initial guess and overwriting it
# with the solution.
# Note: keep in sync with the SparseMatrixCSC method
function HYPRE.solve!(solver::HYPRESolver, x::Vector, A::SparseMatrixCSR, b::Vector)
    hypre_sol = HYPREVector(x)
    HYPRE.solve!(solver, hypre_sol, HYPREMatrix(A), HYPREVector(b))
    copy!(x, hypre_sol)
    return x
end

end # module HYPRESparseMatricesCSR

48
src/HYPRE.jl

@ -4,7 +4,6 @@ module HYPRE
using MPI: MPI using MPI: MPI
using SparseArrays: SparseArrays, SparseMatrixCSC, nonzeros, nzrange, rowvals using SparseArrays: SparseArrays, SparseMatrixCSC, nonzeros, nzrange, rowvals
using SparseMatricesCSR: SparseMatrixCSR, colvals
export HYPREMatrix, HYPREVector export HYPREMatrix, HYPREVector
@ -184,9 +183,9 @@ function Base.zero(b::HYPREVector)
return x return x
end end
###################################### ##################################
# SparseMatrixCS(C|R) -> HYPREMatrix # # SparseMatrixCSC -> HYPREMatrix #
###################################### ##################################
function Internals.check_n_rows(A, ilower, iupper) function Internals.check_n_rows(A, ilower, iupper)
if size(A, 1) != (iupper - ilower + 1) if size(A, 1) != (iupper - ilower + 1)
@ -233,38 +232,8 @@ function Internals.to_hypre_data(A::SparseMatrixCSC, ilower, iupper)
return nrows, ncols, rows, cols, values return nrows, ncols, rows, cols, values
end end
function Internals.to_hypre_data(A::SparseMatrixCSR, ilower, iupper) # Note: keep in sync with the SparseMatrixCSR method
Internals.check_n_rows(A, ilower, iupper) function HYPREMatrix(comm::MPI.Comm, B::SparseMatrixCSC, ilower, iupper)
nnz = SparseArrays.nnz(A)
A_cols = colvals(A)
A_vals = nonzeros(A)
# Initialize the data buffers HYPRE wants
nrows = HYPRE_Int(iupper - ilower + 1) # Total number of rows
ncols = Vector{HYPRE_Int}(undef, nrows) # Number of colums for each row
rows = collect(HYPRE_BigInt, ilower:iupper) # The row indices
cols = Vector{HYPRE_BigInt}(undef, nnz) # The column indices
values = Vector{HYPRE_Complex}(undef, nnz) # The values
# Loop over the rows and collect all values
k = 0
@inbounds for i in 1:size(A, 1)
nzr = nzrange(A, i)
ncols[i] = length(nzr)
for j in nzr
k += 1
col = A_cols[j]
val = A_vals[j]
cols[k] = col
values[k] = val
end
end
@assert nnz == k
@assert nrows == length(ncols) == length(rows)
return nrows, ncols, rows, cols, values
end
function HYPREMatrix(comm::MPI.Comm, B::Union{SparseMatrixCSC,SparseMatrixCSR}, ilower, iupper)
A = HYPREMatrix(comm, ilower, iupper) A = HYPREMatrix(comm, ilower, iupper)
nrows, ncols, rows, cols, values = Internals.to_hypre_data(B, ilower, iupper) nrows, ncols, rows, cols, values = Internals.to_hypre_data(B, ilower, iupper)
@check HYPRE_IJMatrixSetValues(A, nrows, ncols, rows, cols, values) @check HYPRE_IJMatrixSetValues(A, nrows, ncols, rows, cols, values)
@ -272,8 +241,10 @@ function HYPREMatrix(comm::MPI.Comm, B::Union{SparseMatrixCSC,SparseMatrixCSR},
return A return A
end end
HYPREMatrix(B::Union{SparseMatrixCSC,SparseMatrixCSR}, ilower=1, iupper=size(B, 1)) = # Note: keep in sync with the SparseMatrixCSC method
HYPREMatrix(MPI.COMM_SELF, B, ilower, iupper) function HYPREMatrix(B::SparseMatrixCSC, ilower=1, iupper=size(B, 1))
return HYPREMatrix(MPI.COMM_SELF, B, ilower, iupper)
end
######################### #########################
# Vector -> HYPREVector # # Vector -> HYPREVector #
@ -504,6 +475,7 @@ include("solver_options.jl")
# Compatibility for Julias that doesn't support package extensions # Compatibility for Julias that doesn't support package extensions
if !(isdefined(Base, :get_extension)) if !(isdefined(Base, :get_extension))
include("../ext/HYPREPartitionedArrays.jl") include("../ext/HYPREPartitionedArrays.jl")
include("../ext/HYPRESparseMatricesCSR.jl")
end end
end # module HYPRE end # module HYPRE

15
src/solvers.jl

@ -51,18 +51,19 @@ See also [`solve`](@ref).
solve!(pcg::HYPRESolver, x::HYPREVector, A::HYPREMatrix, ::HYPREVector) solve!(pcg::HYPRESolver, x::HYPREVector, A::HYPREMatrix, ::HYPREVector)
######################################## ####################################
# SparseMatrixCS(C|R) solver interface # # SparseMatrixCSC solver interface #
######################################## ####################################
# TODO: This could use the HYPRE compile flag for sequential mode to avoid MPI overhead # Note: keep in sync with the SparseMatrixCSR method
function solve(solver::HYPRESolver, A::SparseMatrixCSC, b::Vector)
function solve(solver::HYPRESolver, A::Union{SparseMatrixCSC,SparseMatrixCSR}, b::Vector)
hypre_x = solve(solver, HYPREMatrix(A), HYPREVector(b)) hypre_x = solve(solver, HYPREMatrix(A), HYPREVector(b))
x = copy!(similar(b, HYPRE_Complex), hypre_x) x = copy!(similar(b, HYPRE_Complex), hypre_x)
return x return x
end end
function solve!(solver::HYPRESolver, x::Vector, A::Union{SparseMatrixCSC,SparseMatrixCSR}, b::Vector)
# Note: keep in sync with the SparseMatrixCSR method
function solve!(solver::HYPRESolver, x::Vector, A::SparseMatrixCSC, b::Vector)
hypre_x = HYPREVector(x) hypre_x = HYPREVector(x)
solve!(solver, hypre_x, HYPREMatrix(A), HYPREVector(b)) solve!(solver, hypre_x, HYPREMatrix(A), HYPREVector(b))
copy!(x, hypre_x) copy!(x, hypre_x)

18
test/runtests.jl

@ -731,18 +731,24 @@ end
@testset "solve with SparseMatrixCS(C|R)" begin @testset "solve with SparseMatrixCS(C|R)" begin
# Setup # Setup
A = sprand(100, 100, 0.05); A = A'A + 5I CSC = sprand(100, 100, 0.05); CSC = CSC'CSC + 5I
CSR = sparsecsr(findnz(CSC)..., size(CSC)...)
b = rand(100) b = rand(100)
x = zeros(100) xcsc = zeros(100)
xcsr = zeros(100)
# Solve # Solve
tol = 1e-9 tol = 1e-9
pcg = HYPRE.PCG(; Tol = tol) pcg = HYPRE.PCG(; Tol = tol)
## solve! ## solve!
HYPRE.solve!(pcg, x, A, b) HYPRE.solve!(pcg, xcsc, CSC, b)
@test x ≈ A \ b atol=tol @test xcsc ≈ CSC \ b atol=tol
HYPRE.solve!(pcg, xcsr, CSR, b)
@test xcsr ≈ CSC \ b atol=tol # TODO: CSR \ b fails
## solve ## solve
x = HYPRE.solve(pcg, A, b) xcsc = HYPRE.solve(pcg, CSC, b)
@test x ≈ A \ b atol=tol @test xcsc ≈ CSC \ b atol=tol
xcsr = HYPRE.solve(pcg, CSR, b)
@test xcsr ≈ CSC \ b atol=tol # TODO: CSR \ b fails
end end
@testset "MPI execution" begin @testset "MPI execution" begin

Loading…
Cancel
Save