assemble: allow for making sparsity block-wise
ksagiyam committed Feb 28, 2024
1 parent 2125369 commit 1a27598
Showing 5 changed files with 78 additions and 19 deletions.
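
In outline, the change replaces the flat `(map_pairs, iteration_regions)` description of a sparsity with a block-wise one keyed by the block indices of a mixed space, so blocks that never receive contributions need not be allocated. Below is a minimal sketch of that structure as it appears in the diffs; the exact constructor signature comes from the `ksagiyam/sparsity` PyOP2 branch pulled in by the CI change and may differ.

```python
# Sketch only: block indices (i, j) of a mixed space map to lists of
# (row_map, column_map, iteration_regions) triples for that block.
# Requires the pyop2 ksagiyam/sparsity branch referenced in build.yml.
from firedrake import *
from pyop2 import op2

mesh = UnitSquareMesh(2, 2)
V = FunctionSpace(mesh, "CG", 1)
W = V * V
rmaps = W.cell_node_map().split  # one map per component of the mixed space
cmaps = W.cell_node_map().split
maps_and_regions = {
    (i, i): [(rmaps[i], cmaps[i], (op2.ALL,))]  # allocate diagonal blocks only
    for i in range(len(W))
}
sparsity = op2.Sparsity((W.dof_dset, W.dof_dset),
                        maps_and_regions,
                        nest=True,
                        block_sparse=False)
```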
1 change: 1 addition & 0 deletions .github/workflows/build.yml
@@ -83,6 +83,7 @@ jobs:
--install fascd \
--install defcon \
--install gadopt \
--package-branch pyop2 ksagiyam/sparsity \
|| (cat firedrake-install.log && /bin/false)
- name: Install test dependencies
run: |
61 changes: 46 additions & 15 deletions firedrake/assemble.py
@@ -333,6 +333,7 @@ def allocate(self):

@cached_property
def maps_and_regions(self):
# The sparsity could be made tighter by inspecting the form DAG.
test, trial = self._form.arguments()
return ExplicitMatrixAssembler._make_maps_and_regions_default(test, trial, self.allocation_integral_types)

@@ -1318,12 +1319,10 @@ def _make_sparsity(test, trial, mat_type, sub_mat_type, maps_and_regions):
baij = mat_type == "baij"
if any(len(a.function_space()) > 1 for a in [test, trial]) and mat_type == "baij":
raise ValueError("BAIJ matrix type makes no sense for mixed spaces, use 'aij'")
map_pairs, iteration_regions = maps_and_regions
try:
sparsity = op2.Sparsity((test.function_space().dof_dset,
trial.function_space().dof_dset),
map_pairs,
iteration_regions=iteration_regions,
maps_and_regions,
nest=nest,
block_sparse=baij)
except SparsityFormatError:
@@ -1333,23 +1332,41 @@ def _make_sparsity(test, trial, mat_type, sub_mat_type, maps_and_regions):

def _make_maps_and_regions(self):
test, trial = self._form.arguments()
allocation_integral_types = set(i.integral_type() for i in self._form.integrals())
for bc in self._bcs:
allocation_integral_types.update(integral.integral_type()
for integral in bc.integrals())
return ExplicitMatrixAssembler._make_maps_and_regions_default(test, trial, allocation_integral_types)
if self._allocation_integral_types is not None:
return ExplicitMatrixAssembler._make_maps_and_regions_default(test, trial, self._allocation_integral_types)
elif any(local_kernel.indices == (None, None) for local_kernel in self._all_local_kernels):
# Handle special cases: slate or split=False
assert all(local_kernel.indices == (None, None) for local_kernel in self._all_local_kernels)
allocation_integral_types = set(local_kernel.kinfo.integral_type
for local_kernel in self._all_local_kernels)
return ExplicitMatrixAssembler._make_maps_and_regions_default(test, trial, allocation_integral_types)
else:
maps_and_regions = defaultdict(lambda: defaultdict(set))
for local_kernel in self._all_local_kernels:
i, j = local_kernel.indices
# Make Sparsity independent of _iterset, which can be a Subset, for better reusability.
get_map, region = ExplicitMatrixAssembler.integral_type_op2_map[local_kernel.kinfo.integral_type]
rmap_ = get_map(test).split[i] if get_map(test) is not None else None
cmap_ = get_map(trial).split[j] if get_map(trial) is not None else None
maps_and_regions[(i, j)][(rmap_, cmap_)].add(region)
return {block_indices: [map_pair + (tuple(region_set), ) for map_pair, region_set in map_pair_to_region_set.items()]
for block_indices, map_pair_to_region_set in maps_and_regions.items()}

@staticmethod
def _make_maps_and_regions_default(test, trial, allocation_integral_types):
domains = defaultdict(set)
# Make maps using outer-product of the component maps
# using the given allocation_integral_types.
if allocation_integral_types is None:
raise ValueError("allocation_integral_types can not be None")
maps_and_regions = defaultdict(lambda: defaultdict(set))
# Use outer product of component maps.
for integral_type in allocation_integral_types:
get_map, region = ExplicitMatrixAssembler.integral_type_op2_map[integral_type]
domains[get_map].add(region)
map_pairs, iteration_regions = zip(*(((get_map(test), get_map(trial)),
tuple(sorted(regions)))
for get_map, regions in domains.items()
if regions))
return tuple(map_pairs), tuple(iteration_regions)
for i, rmap_ in enumerate(get_map(test)):
for j, cmap_ in enumerate(get_map(trial)):
maps_and_regions[(i, j)][(rmap_, cmap_)].add(region)
return {block_indices: [map_pair + (tuple(region_set), ) for map_pair, region_set in map_pair_to_region_set.items()]
for block_indices, map_pair_to_region_set in maps_and_regions.items()}

@classmethod
@property
@@ -1370,6 +1387,20 @@ def integral_type_op2_map(cls):
"interior_facet_vert": (get_intf_map, op2.ALL)}
return cls._integral_type_op2_map

@cached_property
def _all_local_kernels(self):
"""Collection of parloop_builders used for sparsity construction.
When constructing sparsity, we use all parloop_builders
that are to be used in the actual assembly.
"""
all_local_kernels = tuple(local_kernel for local_kernel, _ in self.local_kernels)
for bc in self._bcs:
if isinstance(bc, EquationBCSplit):
_assembler = type(self)(bc.f, bcs=bc.bcs, form_compiler_parameters=self._form_compiler_params, needs_zeroing=False)
all_local_kernels += _assembler._all_local_kernels
return all_local_kernels

def _apply_bc(self, tensor, bc):
op2tensor = tensor.M
spaces = tuple(a.function_space() for a in tensor.a.arguments())
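
The grouping performed by the new `_make_maps_and_regions` can be read in isolation: local kernels are bucketed first by block indices, then by (row map, column map) pair, accumulating the set of iteration regions, and finally flattened into `(rmap, cmap, regions)` triples per block. Here is a stand-alone, pure-Python illustration of that pattern (the map and region values are placeholders, not real PyOP2 objects):

```python
from collections import defaultdict

# Placeholder kernel descriptions: (block indices, row map, column map, region).
kernels = [
    ((0, 0), "rmap0", "cmap0", "ALL"),
    ((0, 0), "rmap0", "cmap0", "BOTTOM"),  # same block and maps, extra region
    ((1, 1), "rmap1", "cmap1", "ALL"),
]

maps_and_regions = defaultdict(lambda: defaultdict(set))
for (i, j), rmap_, cmap_, region in kernels:
    maps_and_regions[(i, j)][(rmap_, cmap_)].add(region)

# Flatten to {block: [(rmap, cmap, regions), ...]}; only blocks that actually
# receive contributions appear, e.g. block (0, 1) is absent here.
flattened = {block: [pair + (tuple(regions),) for pair, regions in pairs.items()]
             for block, pairs in maps_and_regions.items()}
print(flattened)  # region order within a tuple may vary (set iteration)
```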
2 changes: 1 addition & 1 deletion firedrake/interpolation.py
@@ -938,7 +938,7 @@ def make_interpolator(expr, V, subset, access, bcs=None):
tensor = None
else:
sparsity = op2.Sparsity((V.dof_dset, argfs.dof_dset),
((V.cell_node_map(), argfs_map),),
[(V.cell_node_map(), argfs_map, None)], # non-mixed
name="%s_%s_sparsity" % (V.name, argfs.name),
nest=False,
block_sparse=True)
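
For an interpolation matrix between two plain (non-mixed) spaces there is only one block, so the hunk above passes a flat list with a single `(row_map, column_map, regions)` triple, with `None` apparently standing for "no iteration-region restriction". A hypothetical spelling of the same single-block sparsity in the explicit block-dict form, under that reading of the branch API:

```python
# Hypothetical single-block variant, assuming the block-dict form from the
# ksagiyam/sparsity branch also accepts (0, 0) as the only block of a
# non-mixed row/column pairing.
from firedrake import *
from pyop2 import op2

mesh = UnitIntervalMesh(4)
V = FunctionSpace(mesh, "CG", 1)
Q = FunctionSpace(mesh, "DG", 0)
sparsity = op2.Sparsity((V.dof_dset, Q.dof_dset),
                        {(0, 0): [(V.cell_node_map(), Q.cell_node_map(), None)]},
                        name="V_Q_sparsity",
                        nest=False,
                        block_sparse=True)
```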
6 changes: 4 additions & 2 deletions firedrake/preconditioners/pmg.py
@@ -1535,8 +1535,10 @@ def prolongation_matrix_aij(P1, Pk, P1_bcs=[], Pk_bcs=[]):
Pk = Pk.function_space()
sp = op2.Sparsity((Pk.dof_dset,
P1.dof_dset),
(Pk.cell_node_map(),
P1.cell_node_map()))
{(i, j): [(rmap, cmap, None)]
for i, rmap in enumerate(Pk.cell_node_map())
for j, cmap in enumerate(P1.cell_node_map())
if i == j})
mat = op2.Mat(sp, PETSc.ScalarType)
mesh = Pk.mesh()

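
The dict comprehension in the hunk above builds per-block map lists for the diagonal blocks only, since the prolongation couples matching components of `Pk` and `P1`. A small, self-contained illustration of the shape it produces (string placeholders stand in for the component cell-node maps):

```python
# Placeholders for the components of Pk.cell_node_map() / P1.cell_node_map().
rmaps = ["Pk_map0", "Pk_map1"]
cmaps = ["P1_map0", "P1_map1"]

blocks = {(i, j): [(rmap, cmap, None)]
          for i, rmap in enumerate(rmaps)
          for j, cmap in enumerate(cmaps)
          if i == j}
print(blocks)
# {(0, 0): [('Pk_map0', 'P1_map0', None)], (1, 1): [('Pk_map1', 'P1_map1', None)]}
```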
27 changes: 26 additions & 1 deletion tests/regression/test_assemble.py
@@ -1,7 +1,7 @@
import pytest
import numpy as np
from firedrake import *
from firedrake.utils import ScalarType
from firedrake.utils import ScalarType, IntType


@pytest.fixture(scope='module')
@@ -291,3 +291,28 @@ def test_assemble_vector_rspace_one_form(mesh):
U = inner(u, u)*dx
L = derivative(U, u)
assemble(L)


def test_assemble_sparsity_no_redundant_entries():
mesh = UnitSquareMesh(2, 2, quadrilateral=True)
V = FunctionSpace(mesh, "CG", 1)
W = V * V * V
u = TrialFunction(W)
v = TestFunction(W)
A = assemble(inner(u, v) * dx, mat_type="nest")
for i in range(len(W)):
for j in range(len(W)):
if i != j:
assert np.all(A.M.sparsity[i][j].nnz == np.zeros(9, dtype=IntType))


def test_assemble_sparsity_diagonal_entries_for_bc():
mesh = UnitSquareMesh(1, 1, quadrilateral=True)
V = FunctionSpace(mesh, "CG", 1)
W = V * V
u = TrialFunction(W)
v = TestFunction(W)
bc = DirichletBC(W.sub(1), 0, "on_boundary")
A = assemble(inner(u[1], v[0]) * dx, bcs=[bc], mat_type="nest")
# Make sure that diagonals are allocated.
assert np.all(A.M.sparsity[1][1].nnz == np.ones(4, dtype=IntType))
