Actual source code: htool.cxx

  1: #include <../src/mat/impls/htool/htool.hpp>
  2: #include <set>

  4: const char *const MatHtoolCompressorTypes[] = {"sympartialACA", "fullACA", "SVD"};
  5: const char *const MatHtoolClusteringTypes[] = {"PCARegular", "PCAGeometric", "BoundingBox1Regular", "BoundingBox1Geometric"};
  6: const char        HtoolCitation[]           = "@article{marchand2020two,\n"
  7:                                               "  Author = {Marchand, Pierre and Claeys, Xavier and Jolivet, Pierre and Nataf, Fr\\'ed\\'eric and Tournier, Pierre-Henri},\n"
  8:                                               "  Title = {Two-level preconditioning for $h$-version boundary element approximation of hypersingular operator with {GenEO}},\n"
  9:                                               "  Year = {2020},\n"
 10:                                               "  Publisher = {Elsevier},\n"
 11:                                               "  Journal = {Numerische Mathematik},\n"
 12:                                               "  Volume = {146},\n"
 13:                                               "  Pages = {597--628},\n"
 14:                                               "  Url = {https://github.com/htool-ddm/htool}\n"
 15:                                               "}\n";
 16: static PetscBool  HtoolCite                 = PETSC_FALSE;

 18: static PetscErrorCode MatGetDiagonal_Htool(Mat A, Vec v)
 19: {
 20:   Mat_Htool   *a;
 21:   PetscScalar *x;
 22:   PetscBool    flg;

 24:   PetscFunctionBegin;
 25:   PetscCall(MatHasCongruentLayouts(A, &flg));
 26:   PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only congruent layouts supported");
 27:   PetscCall(MatShellGetContext(A, &a));
 28:   PetscCall(VecGetArrayWrite(v, &x));
 29:   PetscStackCallExternalVoid("copy_diagonal_in_user_numbering", htool::copy_diagonal_in_user_numbering(a->distributed_operator_holder->hmatrix, x));
 30:   PetscCall(VecRestoreArrayWrite(v, &x));
 31:   PetscFunctionReturn(PETSC_SUCCESS);
 32: }

 34: static PetscErrorCode MatGetDiagonalBlock_Htool(Mat A, Mat *b)
 35: {
 36:   Mat_Htool                 *a;
 37:   Mat                        B;
 38:   PetscScalar               *ptr, shift, scale;
 39:   PetscBool                  flg;
 40:   PetscMPIInt                rank;
 41:   htool::Cluster<PetscReal> *source_cluster = nullptr;

 43:   PetscFunctionBegin;
 44:   PetscCall(MatHasCongruentLayouts(A, &flg));
 45:   PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only congruent layouts supported");
 46:   PetscCall(MatShellGetContext(A, &a));
 47:   PetscCall(PetscObjectQuery((PetscObject)A, "DiagonalBlock", (PetscObject *)&B)); /* same logic as in MatGetDiagonalBlock_MPIDense() */
 48:   if (!B) {
 49:     PetscCall(MatShellGetScalingShifts(A, &shift, &scale, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Mat *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED));
 50:     PetscCall(MatCreateDense(PETSC_COMM_SELF, A->rmap->n, A->rmap->n, A->rmap->n, A->rmap->n, nullptr, &B));
 51:     PetscCall(MatDenseGetArrayWrite(B, &ptr));
 52:     PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
 53:     source_cluster = a->source_cluster ? a->source_cluster.get() : a->target_cluster.get();
 54:     PetscStackCallExternalVoid("copy_to_dense_in_user_numbering", htool::copy_to_dense_in_user_numbering(*a->distributed_operator_holder->hmatrix.get_sub_hmatrix(a->target_cluster->get_cluster_on_partition(rank), source_cluster->get_cluster_on_partition(rank)), ptr));
 55:     PetscCall(MatDenseRestoreArrayWrite(B, &ptr));
 56:     PetscCall(MatPropagateSymmetryOptions(A, B));
 57:     PetscCall(PetscObjectCompose((PetscObject)A, "DiagonalBlock", (PetscObject)B));
 58:     *b = B;
 59:     PetscCall(MatDestroy(&B));
 60:     PetscCall(MatShift(*b, shift));
 61:     PetscCall(MatScale(*b, scale));
 62:   } else {
 63:     PetscCall(MatShellGetScalingShifts(A, (PetscScalar *)MAT_SHELL_NOT_ALLOWED, (PetscScalar *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Mat *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED));
 64:     *b = B;
 65:   }
 66:   PetscFunctionReturn(PETSC_SUCCESS);
 67: }

 69: static PetscErrorCode MatMult_Htool(Mat A, Vec x, Vec y)
 70: {
 71:   Mat_Htool         *a;
 72:   const PetscScalar *in;
 73:   PetscScalar       *out;

 75:   PetscFunctionBegin;
 76:   PetscCall(MatShellGetContext(A, &a));
 77:   PetscCall(VecGetArrayRead(x, &in));
 78:   PetscCall(VecGetArrayWrite(y, &out));
 79:   a->distributed_operator_holder->distributed_operator.vector_product_local_to_local(in, out, nullptr);
 80:   PetscCall(VecRestoreArrayRead(x, &in));
 81:   PetscCall(VecRestoreArrayWrite(y, &out));
 82:   PetscFunctionReturn(PETSC_SUCCESS);
 83: }

 85: static PetscErrorCode MatMultTranspose_Htool(Mat A, Vec x, Vec y)
 86: {
 87:   Mat_Htool         *a;
 88:   const PetscScalar *in;
 89:   PetscScalar       *out;

 91:   PetscFunctionBegin;
 92:   PetscCall(MatShellGetContext(A, &a));
 93:   PetscCall(VecGetArrayRead(x, &in));
 94:   PetscCall(VecGetArrayWrite(y, &out));
 95:   a->distributed_operator_holder->distributed_operator.vector_product_transp_local_to_local(in, out, nullptr);
 96:   PetscCall(VecRestoreArrayRead(x, &in));
 97:   PetscCall(VecRestoreArrayWrite(y, &out));
 98:   PetscFunctionReturn(PETSC_SUCCESS);
 99: }

101: static PetscErrorCode MatIncreaseOverlap_Htool(Mat A, PetscInt is_max, IS is[], PetscInt ov)
102: {
103:   std::set<PetscInt> set;
104:   const PetscInt    *idx;
105:   PetscInt          *oidx, size, bs[2];
106:   PetscMPIInt        csize;

108:   PetscFunctionBegin;
109:   PetscCall(MatGetBlockSizes(A, bs, bs + 1));
110:   if (bs[0] != bs[1]) bs[0] = 1;
111:   for (PetscInt i = 0; i < is_max; ++i) {
         set.clear(); /* reset between index sets so indices gathered for a previous IS do not leak into the next one */
112:     /* basic implementation that adds indices by shifting an IS by -ov, -ov+1..., -1, 1..., ov-1, ov */
113:     /* needed to avoid subdomain matrices replicating A, since A is dense                            */
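         /* e.g., with ov = 1 and an IS holding {5, 6}, the resulting indices are {4, 5, 6, 7} (clipped to [0, min(M, N)) and, for bs > 1, expanded to full blocks) */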
114:     PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)is[i]), &csize));
115:     PetscCheck(csize == 1, PETSC_COMM_SELF, PETSC_ERR_WRONG_MPI_SIZE, "Unsupported parallel IS");
116:     PetscCall(ISGetSize(is[i], &size));
117:     PetscCall(ISGetIndices(is[i], &idx));
118:     for (PetscInt j = 0; j < size; ++j) {
119:       set.insert(idx[j]);
120:       for (PetscInt k = 1; k <= ov; ++k) {                                              /* for each layer of overlap      */
121:         if (idx[j] - k >= 0) set.insert(idx[j] - k);                                    /* do not insert negative indices */
122:         if (idx[j] + k < A->rmap->N && idx[j] + k < A->cmap->N) set.insert(idx[j] + k); /* do not insert indices greater than the dimension of A */
123:       }
124:     }
125:     PetscCall(ISRestoreIndices(is[i], &idx));
126:     PetscCall(ISDestroy(is + i));
127:     if (bs[0] > 1) {
128:       for (std::set<PetscInt>::iterator it = set.cbegin(); it != set.cend(); it++) {
129:         std::vector<PetscInt> block(bs[0]);
130:         std::iota(block.begin(), block.end(), (*it / bs[0]) * bs[0]);
131:         set.insert(block.cbegin(), block.cend());
132:       }
133:     }
134:     size = set.size(); /* size with overlap */
135:     PetscCall(PetscMalloc1(size, &oidx));
136:     for (const PetscInt j : set) *oidx++ = j;
137:     oidx -= size;
138:     PetscCall(ISCreateGeneral(PETSC_COMM_SELF, size, oidx, PETSC_OWN_POINTER, is + i));
139:   }
140:   PetscFunctionReturn(PETSC_SUCCESS);
141: }

143: static PetscErrorCode MatCreateSubMatrices_Htool(Mat A, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
144: {
145:   Mat_Htool         *a;
146:   Mat                D, B, BT;
147:   const PetscScalar *copy;
148:   PetscScalar       *ptr, shift, scale;
149:   const PetscInt    *idxr, *idxc, *it;
150:   PetscInt           nrow, m, i;
151:   PetscBool          flg;

153:   PetscFunctionBegin;
154:   PetscCall(MatShellGetScalingShifts(A, &shift, &scale, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Mat *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED));
155:   PetscCall(MatShellGetContext(A, &a));
156:   if (scall != MAT_REUSE_MATRIX) PetscCall(PetscCalloc1(n, submat));
157:   for (i = 0; i < n; ++i) {
158:     PetscCall(ISGetLocalSize(irow[i], &nrow));
159:     PetscCall(ISGetLocalSize(icol[i], &m));
160:     PetscCall(ISGetIndices(irow[i], &idxr));
161:     PetscCall(ISGetIndices(icol[i], &idxc));
162:     if (scall != MAT_REUSE_MATRIX) PetscCall(MatCreateDense(PETSC_COMM_SELF, nrow, m, nrow, m, nullptr, (*submat) + i));
163:     PetscCall(MatDenseGetArrayWrite((*submat)[i], &ptr));
164:     if (irow[i] == icol[i]) { /* same row and column IS? */
165:       PetscCall(MatHasCongruentLayouts(A, &flg));
166:       if (flg) {
167:         PetscCall(ISSorted(irow[i], &flg));
168:         if (flg) { /* sorted IS? */
169:           it = std::lower_bound(idxr, idxr + nrow, A->rmap->rstart);
170:           if (it != idxr + nrow && *it == A->rmap->rstart) {    /* rmap->rstart in IS? */
171:             if (std::distance(idxr, it) + A->rmap->n <= nrow) { /* long enough IS to store the local diagonal block? */
172:               for (PetscInt j = 0; j < A->rmap->n && flg; ++j)
173:                 if (PetscUnlikely(it[j] != A->rmap->rstart + j)) flg = PETSC_FALSE;
174:               if (flg) { /* complete local diagonal block in IS? */
175:                 /* fast extraction when the local diagonal block is part of the submatrix, e.g., for PCASM or PCHPDDM
176:                  *      [   B   C   E   ]
177:                  *  A = [   B   D   E   ]
178:                  *      [   B   F   E   ]
179:                  */
180:                 m = std::distance(idxr, it); /* shift of the coefficient (0,0) of block D from above */
181:                 PetscCall(MatGetDiagonalBlock(A, &D));
182:                 PetscCall(MatDenseGetArrayRead(D, &copy));
183:                 for (PetscInt k = 0; k < A->rmap->n; ++k) { PetscCall(PetscArraycpy(ptr + (m + k) * nrow + m, copy + k * A->rmap->n, A->rmap->n)); /* block D from above */ }
184:                 PetscCall(MatDenseRestoreArrayRead(D, &copy));
185:                 if (m) {
186:                   a->wrapper->copy_submatrix(nrow, m, idxr, idxc, ptr); /* vertical block B from above */
187:                   /* entry-wise assembly may be costly, so transpose already-computed entries when possible */
188:                   if (A->symmetric == PETSC_BOOL3_TRUE || A->hermitian == PETSC_BOOL3_TRUE) {
189:                     PetscCall(MatCreateDense(PETSC_COMM_SELF, A->rmap->n, m, A->rmap->n, m, ptr + m, &B));
190:                     PetscCall(MatDenseSetLDA(B, nrow));
191:                     PetscCall(MatCreateDense(PETSC_COMM_SELF, m, A->rmap->n, m, A->rmap->n, ptr + m * nrow, &BT));
192:                     PetscCall(MatDenseSetLDA(BT, nrow));
193:                     if (A->hermitian == PETSC_BOOL3_TRUE && PetscDefined(USE_COMPLEX)) {
194:                       PetscCall(MatHermitianTranspose(B, MAT_REUSE_MATRIX, &BT));
195:                     } else {
196:                       PetscCall(MatTransposeSetPrecursor(B, BT));
197:                       PetscCall(MatTranspose(B, MAT_REUSE_MATRIX, &BT));
198:                     }
199:                     PetscCall(MatDestroy(&B));
200:                     PetscCall(MatDestroy(&BT));
201:                   } else {
202:                     for (PetscInt k = 0; k < A->rmap->n; ++k) { /* block C from above */
203:                       a->wrapper->copy_submatrix(m, 1, idxr, idxc + m + k, ptr + (m + k) * nrow);
204:                     }
205:                   }
206:                 }
207:                 if (m + A->rmap->n != nrow) {
208:                   a->wrapper->copy_submatrix(nrow, std::distance(it + A->rmap->n, idxr + nrow), idxr, idxc + m + A->rmap->n, ptr + (m + A->rmap->n) * nrow); /* vertical block E from above */
209:                   /* entry-wise assembly may be costly, so transpose already-computed entries when possible */
210:                   if (A->symmetric == PETSC_BOOL3_TRUE || A->hermitian == PETSC_BOOL3_TRUE) {
211:                     PetscCall(MatCreateDense(PETSC_COMM_SELF, A->rmap->n, nrow - (m + A->rmap->n), A->rmap->n, nrow - (m + A->rmap->n), ptr + (m + A->rmap->n) * nrow + m, &B));
212:                     PetscCall(MatDenseSetLDA(B, nrow));
213:                     PetscCall(MatCreateDense(PETSC_COMM_SELF, nrow - (m + A->rmap->n), A->rmap->n, nrow - (m + A->rmap->n), A->rmap->n, ptr + m * nrow + m + A->rmap->n, &BT));
214:                     PetscCall(MatDenseSetLDA(BT, nrow));
215:                     if (A->hermitian == PETSC_BOOL3_TRUE && PetscDefined(USE_COMPLEX)) {
216:                       PetscCall(MatHermitianTranspose(B, MAT_REUSE_MATRIX, &BT));
217:                     } else {
218:                       PetscCall(MatTransposeSetPrecursor(B, BT));
219:                       PetscCall(MatTranspose(B, MAT_REUSE_MATRIX, &BT));
220:                     }
221:                     PetscCall(MatDestroy(&B));
222:                     PetscCall(MatDestroy(&BT));
223:                   } else {
224:                     for (PetscInt k = 0; k < A->rmap->n; ++k) { /* block F from above */
225:                       a->wrapper->copy_submatrix(std::distance(it + A->rmap->n, idxr + nrow), 1, it + A->rmap->n, idxc + m + k, ptr + (m + k) * nrow + m + A->rmap->n);
226:                     }
227:                   }
228:                 }
229:               } /* complete local diagonal block not in IS */
230:             } else flg = PETSC_FALSE; /* IS not long enough to store the local diagonal block */
231:           } else flg = PETSC_FALSE;   /* rmap->rstart not in IS */
232:         } /* unsorted IS */
233:       }
234:     } else flg = PETSC_FALSE;                                       /* different row and column IS */
235:     if (!flg) a->wrapper->copy_submatrix(nrow, m, idxr, idxc, ptr); /* reassemble everything */
236:     PetscCall(ISRestoreIndices(irow[i], &idxr));
237:     PetscCall(ISRestoreIndices(icol[i], &idxc));
238:     PetscCall(MatDenseRestoreArrayWrite((*submat)[i], &ptr));
239:     PetscCall(MatShift((*submat)[i], shift));
240:     PetscCall(MatScale((*submat)[i], scale));
241:   }
242:   PetscFunctionReturn(PETSC_SUCCESS);
243: }

245: static PetscErrorCode MatDestroy_Htool(Mat A)
246: {
247:   Mat_Htool               *a;
248:   PetscContainer           container;
249:   MatHtoolKernelTranspose *kernelt;

251:   PetscFunctionBegin;
252:   PetscCall(MatShellGetContext(A, &a));
253:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_htool_seqdense_C", nullptr));
254:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_htool_mpidense_C", nullptr));
255:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_htool_seqdense_C", nullptr));
256:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_htool_mpidense_C", nullptr));
257:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetHierarchicalMat_C", nullptr));
258:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolSetKernel_C", nullptr));
259:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetPermutationSource_C", nullptr));
260:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetPermutationTarget_C", nullptr));
261:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolUsePermutation_C", nullptr));
262:   PetscCall(PetscObjectQuery((PetscObject)A, "KernelTranspose", (PetscObject *)&container));
263:   if (container) { /* created in MatTranspose_Htool() */
264:     PetscCall(PetscContainerGetPointer(container, (void **)&kernelt));
265:     PetscCall(MatDestroy(&kernelt->A));
266:     PetscCall(PetscObjectCompose((PetscObject)A, "KernelTranspose", nullptr));
267:   }
268:   if (a->gcoords_source != a->gcoords_target) PetscCall(PetscFree(a->gcoords_source));
269:   PetscCall(PetscFree(a->gcoords_target));
270:   PetscCall(PetscFree2(a->work_source, a->work_target));
271:   delete a->wrapper;
272:   a->target_cluster.reset();
273:   a->source_cluster.reset();
274:   a->distributed_operator_holder.reset();
275:   PetscCall(PetscFree(a));
276:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatShellSetContext_C", nullptr)); // needed to avoid a call to MatShellSetContext_Immutable()
277:   PetscFunctionReturn(PETSC_SUCCESS);
278: }

280: static PetscErrorCode MatView_Htool(Mat A, PetscViewer pv)
281: {
282:   Mat_Htool                         *a;
283:   PetscScalar                        shift, scale;
284:   PetscBool                          flg;
285:   std::map<std::string, std::string> hmatrix_information;

287:   PetscFunctionBegin;
288:   PetscCall(MatShellGetContext(A, &a));
289:   hmatrix_information = htool::get_distributed_hmatrix_information(a->distributed_operator_holder->hmatrix, PetscObjectComm((PetscObject)A));
290:   PetscCall(PetscObjectTypeCompare((PetscObject)pv, PETSCVIEWERASCII, &flg));
291:   if (flg) {
292:     PetscCall(MatShellGetScalingShifts(A, &shift, &scale, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Mat *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED));
293:     PetscCall(PetscViewerASCIIPrintf(pv, "symmetry: %c\n", a->distributed_operator_holder->distributed_operator.get_symmetry_type()));
294:     if (PetscAbsScalar(scale - 1.0) > PETSC_MACHINE_EPSILON) {
295: #if defined(PETSC_USE_COMPLEX)
296:       PetscCall(PetscViewerASCIIPrintf(pv, "scaling: %g+%gi\n", (double)PetscRealPart(scale), (double)PetscImaginaryPart(scale)));
297: #else
298:       PetscCall(PetscViewerASCIIPrintf(pv, "scaling: %g\n", (double)scale));
299: #endif
300:     }
301:     if (PetscAbsScalar(shift) > PETSC_MACHINE_EPSILON) {
302: #if defined(PETSC_USE_COMPLEX)
303:       PetscCall(PetscViewerASCIIPrintf(pv, "shift: %g+%gi\n", (double)PetscRealPart(shift), (double)PetscImaginaryPart(shift)));
304: #else
305:       PetscCall(PetscViewerASCIIPrintf(pv, "shift: %g\n", (double)shift));
306: #endif
307:     }
308:     PetscCall(PetscViewerASCIIPrintf(pv, "minimum cluster size: %" PetscInt_FMT "\n", a->min_cluster_size));
309:     PetscCall(PetscViewerASCIIPrintf(pv, "epsilon: %g\n", (double)a->epsilon));
310:     PetscCall(PetscViewerASCIIPrintf(pv, "eta: %g\n", (double)a->eta));
311:     PetscCall(PetscViewerASCIIPrintf(pv, "minimum target depth: %" PetscInt_FMT "\n", a->depth[0]));
312:     PetscCall(PetscViewerASCIIPrintf(pv, "minimum source depth: %" PetscInt_FMT "\n", a->depth[1]));
313:     PetscCall(PetscViewerASCIIPrintf(pv, "compressor: %s\n", MatHtoolCompressorTypes[a->compressor]));
314:     PetscCall(PetscViewerASCIIPrintf(pv, "clustering: %s\n", MatHtoolClusteringTypes[a->clustering]));
315:     PetscCall(PetscViewerASCIIPrintf(pv, "compression ratio: %s\n", hmatrix_information["Compression_ratio"].c_str()));
316:     PetscCall(PetscViewerASCIIPrintf(pv, "space saving: %s\n", hmatrix_information["Space_saving"].c_str()));
317:     PetscCall(PetscViewerASCIIPrintf(pv, "block tree consistency: %s\n", PetscBools[a->distributed_operator_holder->hmatrix.is_block_tree_consistent()]));
318:     PetscCall(PetscViewerASCIIPrintf(pv, "number of dense (resp. low rank) matrices: %s (resp. %s)\n", hmatrix_information["Number_of_dense_blocks"].c_str(), hmatrix_information["Number_of_low_rank_blocks"].c_str()));
319:     PetscCall(
320:       PetscViewerASCIIPrintf(pv, "(minimum, mean, maximum) dense block sizes: (%s, %s, %s)\n", hmatrix_information["Dense_block_size_min"].c_str(), hmatrix_information["Dense_block_size_mean"].c_str(), hmatrix_information["Dense_block_size_max"].c_str()));
321:     PetscCall(PetscViewerASCIIPrintf(pv, "(minimum, mean, maximum) low rank block sizes: (%s, %s, %s)\n", hmatrix_information["Low_rank_block_size_min"].c_str(), hmatrix_information["Low_rank_block_size_mean"].c_str(),
322:                                      hmatrix_information["Low_rank_block_size_max"].c_str()));
323:     PetscCall(PetscViewerASCIIPrintf(pv, "(minimum, mean, maximum) ranks: (%s, %s, %s)\n", hmatrix_information["Rank_min"].c_str(), hmatrix_information["Rank_mean"].c_str(), hmatrix_information["Rank_max"].c_str()));
324:   }
325:   PetscFunctionReturn(PETSC_SUCCESS);
326: }

328: /* naive implementation of MatGetRow() needed for MatConvert_Nest_AIJ() */
329: static PetscErrorCode MatGetRow_Htool(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
330: {
331:   Mat_Htool   *a;
332:   PetscScalar  shift, scale;
333:   PetscInt    *idxc = nullptr; /* only allocated when idx or v is requested */
334:   PetscBLASInt one = 1, bn;

336:   PetscFunctionBegin;
337:   PetscCall(MatShellGetScalingShifts(A, &shift, &scale, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Mat *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED));
338:   PetscCall(MatShellGetContext(A, &a));
339:   if (nz) *nz = A->cmap->N;
340:   if (idx || v) { /* even if !idx, need to set idxc for htool::copy_submatrix() */
341:     PetscCall(PetscMalloc1(A->cmap->N, &idxc));
342:     for (PetscInt i = 0; i < A->cmap->N; ++i) idxc[i] = i;
343:   }
344:   if (idx) *idx = idxc;
345:   if (v) {
346:     PetscCall(PetscMalloc1(A->cmap->N, v));
347:     if (a->wrapper) a->wrapper->copy_submatrix(1, A->cmap->N, &row, idxc, *v);
348:     else reinterpret_cast<htool::VirtualGenerator<PetscScalar> *>(a->kernelctx)->copy_submatrix(1, A->cmap->N, &row, idxc, *v);
349:     PetscCall(PetscBLASIntCast(A->cmap->N, &bn));
350:     PetscCallCXX(htool::Blas<PetscScalar>::scal(&bn, &scale, *v, &one));
351:     if (row < A->cmap->N) (*v)[row] += shift;
352:   }
353:   if (!idx) PetscCall(PetscFree(idxc));
354:   PetscFunctionReturn(PETSC_SUCCESS);
355: }

357: static PetscErrorCode MatRestoreRow_Htool(Mat, PetscInt, PetscInt *, PetscInt **idx, PetscScalar **v)
358: {
359:   PetscFunctionBegin;
360:   if (idx) PetscCall(PetscFree(*idx));
361:   if (v) PetscCall(PetscFree(*v));
362:   PetscFunctionReturn(PETSC_SUCCESS);
363: }

365: static PetscErrorCode MatSetFromOptions_Htool(Mat A, PetscOptionItems PetscOptionsObject)
366: {
367:   Mat_Htool *a;
368:   PetscInt   n;
369:   PetscBool  flg;

371:   PetscFunctionBegin;
372:   PetscCall(MatShellGetContext(A, &a));
373:   PetscOptionsHeadBegin(PetscOptionsObject, "Htool options");
374:   PetscCall(PetscOptionsBoundedInt("-mat_htool_min_cluster_size", "Minimal leaf size in cluster tree", nullptr, a->min_cluster_size, &a->min_cluster_size, nullptr, 0));
375:   PetscCall(PetscOptionsBoundedReal("-mat_htool_epsilon", "Relative error in Frobenius norm when approximating a block", nullptr, a->epsilon, &a->epsilon, nullptr, 0.0));
376:   PetscCall(PetscOptionsReal("-mat_htool_eta", "Admissibility condition tolerance", nullptr, a->eta, &a->eta, nullptr));
377:   PetscCall(PetscOptionsBoundedInt("-mat_htool_min_target_depth", "Minimal cluster tree depth associated with the rows", nullptr, a->depth[0], a->depth, nullptr, 0));
378:   PetscCall(PetscOptionsBoundedInt("-mat_htool_min_source_depth", "Minimal cluster tree depth associated with the columns", nullptr, a->depth[1], a->depth + 1, nullptr, 0));
379:   PetscCall(PetscOptionsBool("-mat_htool_block_tree_consistency", "Block tree consistency", nullptr, a->block_tree_consistency, &a->block_tree_consistency, nullptr));

381:   n = 0;
382:   PetscCall(PetscOptionsEList("-mat_htool_compressor", "Type of compression", "MatHtoolCompressorType", MatHtoolCompressorTypes, PETSC_STATIC_ARRAY_LENGTH(MatHtoolCompressorTypes), MatHtoolCompressorTypes[MAT_HTOOL_COMPRESSOR_SYMPARTIAL_ACA], &n, &flg));
383:   if (flg) a->compressor = MatHtoolCompressorType(n);
384:   n = 0;
385:   PetscCall(PetscOptionsEList("-mat_htool_clustering", "Type of clustering", "MatHtoolClusteringType", MatHtoolClusteringTypes, PETSC_STATIC_ARRAY_LENGTH(MatHtoolClusteringTypes), MatHtoolClusteringTypes[MAT_HTOOL_CLUSTERING_PCA_REGULAR], &n, &flg));
386:   if (flg) a->clustering = MatHtoolClusteringType(n);
387:   PetscOptionsHeadEnd();
388:   PetscFunctionReturn(PETSC_SUCCESS);
389: }

391: static PetscErrorCode MatAssemblyEnd_Htool(Mat A, MatAssemblyType)
392: {
393:   Mat_Htool                                                   *a;
394:   const PetscInt                                              *ranges;
395:   PetscInt                                                    *offset;
396:   PetscMPIInt                                                  size, rank;
397:   char                                                         S = PetscDefined(USE_COMPLEX) && A->hermitian == PETSC_BOOL3_TRUE ? 'H' : (A->symmetric == PETSC_BOOL3_TRUE ? 'S' : 'N'), uplo = S == 'N' ? 'N' : 'U';
398:   htool::VirtualGenerator<PetscScalar>                        *generator = nullptr;
399:   htool::ClusterTreeBuilder<PetscReal>                         recursive_build_strategy;
400:   htool::Cluster<PetscReal>                                   *source_cluster;
401:   std::shared_ptr<htool::VirtualLowRankGenerator<PetscScalar>> compressor;

403:   PetscFunctionBegin;
404:   PetscCall(PetscCitationsRegister(HtoolCitation, &HtoolCite));
405:   PetscCall(MatShellGetContext(A, &a));
406:   delete a->wrapper;
407:   a->target_cluster.reset();
408:   a->source_cluster.reset();
409:   a->distributed_operator_holder.reset();
410:   // clustering
411:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
412:   PetscCall(PetscMalloc1(2 * size, &offset));
413:   PetscCall(MatGetOwnershipRanges(A, &ranges));
414:   for (PetscInt i = 0; i < size; ++i) {
415:     offset[2 * i]     = ranges[i];
416:     offset[2 * i + 1] = ranges[i + 1] - ranges[i];
417:   }
418:   switch (a->clustering) {
419:   case MAT_HTOOL_CLUSTERING_PCA_GEOMETRIC:
420:     recursive_build_strategy.set_direction_computation_strategy(std::make_shared<htool::ComputeLargestExtent<PetscReal>>());
421:     recursive_build_strategy.set_splitting_strategy(std::make_shared<htool::GeometricSplitting<PetscReal>>());
422:     break;
423:   case MAT_HTOOL_CLUSTERING_BOUNDING_BOX_1_GEOMETRIC:
424:     recursive_build_strategy.set_direction_computation_strategy(std::make_shared<htool::ComputeBoundingBox<PetscReal>>());
425:     recursive_build_strategy.set_splitting_strategy(std::make_shared<htool::GeometricSplitting<PetscReal>>());
426:     break;
427:   case MAT_HTOOL_CLUSTERING_BOUNDING_BOX_1_REGULAR:
428:     recursive_build_strategy.set_direction_computation_strategy(std::make_shared<htool::ComputeBoundingBox<PetscReal>>());
429:     recursive_build_strategy.set_splitting_strategy(std::make_shared<htool::RegularSplitting<PetscReal>>());
430:     break;
431:   default:
432:     recursive_build_strategy.set_direction_computation_strategy(std::make_shared<htool::ComputeLargestExtent<PetscReal>>());
433:     recursive_build_strategy.set_splitting_strategy(std::make_shared<htool::RegularSplitting<PetscReal>>());
434:   }
435:   recursive_build_strategy.set_minclustersize(a->min_cluster_size);
436:   a->target_cluster = std::make_unique<htool::Cluster<PetscReal>>(recursive_build_strategy.create_cluster_tree(A->rmap->N, a->dim, a->gcoords_target, 2, size, offset));
437:   if (a->gcoords_target != a->gcoords_source) {
438:     PetscCall(MatGetOwnershipRangesColumn(A, &ranges));
439:     for (PetscInt i = 0; i < size; ++i) {
440:       offset[2 * i]     = ranges[i];
441:       offset[2 * i + 1] = ranges[i + 1] - ranges[i];
442:     }
443:     switch (a->clustering) {
444:     case MAT_HTOOL_CLUSTERING_PCA_GEOMETRIC:
445:       recursive_build_strategy.set_direction_computation_strategy(std::make_shared<htool::ComputeLargestExtent<PetscReal>>());
446:       recursive_build_strategy.set_splitting_strategy(std::make_shared<htool::GeometricSplitting<PetscReal>>());
447:       break;
448:     case MAT_HTOOL_CLUSTERING_BOUNDING_BOX_1_GEOMETRIC:
449:       recursive_build_strategy.set_direction_computation_strategy(std::make_shared<htool::ComputeBoundingBox<PetscReal>>());
450:       recursive_build_strategy.set_splitting_strategy(std::make_shared<htool::GeometricSplitting<PetscReal>>());
451:       break;
452:     case MAT_HTOOL_CLUSTERING_BOUNDING_BOX_1_REGULAR:
453:       recursive_build_strategy.set_direction_computation_strategy(std::make_shared<htool::ComputeBoundingBox<PetscReal>>());
454:       recursive_build_strategy.set_splitting_strategy(std::make_shared<htool::RegularSplitting<PetscReal>>());
455:       break;
456:     default:
457:       recursive_build_strategy.set_direction_computation_strategy(std::make_shared<htool::ComputeLargestExtent<PetscReal>>());
458:       recursive_build_strategy.set_splitting_strategy(std::make_shared<htool::RegularSplitting<PetscReal>>());
459:     }
460:     recursive_build_strategy.set_minclustersize(a->min_cluster_size);
461:     a->source_cluster = std::make_unique<htool::Cluster<PetscReal>>(recursive_build_strategy.create_cluster_tree(A->cmap->N, a->dim, a->gcoords_source, 2, size, offset));
462:     S = uplo       = 'N';
463:     source_cluster = a->source_cluster.get();
464:   } else source_cluster = a->target_cluster.get();
465:   PetscCall(PetscFree(offset));
466:   // generator
467:   if (a->kernel) a->wrapper = new WrapperHtool(a->dim, a->kernel, a->kernelctx);
468:   else {
469:     a->wrapper = nullptr;
470:     generator  = reinterpret_cast<htool::VirtualGenerator<PetscScalar> *>(a->kernelctx);
471:   }
472:   // compressor
473:   switch (a->compressor) {
474:   case MAT_HTOOL_COMPRESSOR_FULL_ACA:
475:     compressor = std::make_shared<htool::fullACA<PetscScalar>>();
476:     break;
477:   case MAT_HTOOL_COMPRESSOR_SVD:
478:     compressor = std::make_shared<htool::SVD<PetscScalar>>();
479:     break;
480:   default:
481:     compressor = std::make_shared<htool::sympartialACA<PetscScalar>>();
482:   }
483:   // local hierarchical matrix
484:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
485:   auto hmatrix_builder = htool::HMatrixTreeBuilder<PetscScalar>(*a->target_cluster, *source_cluster, a->epsilon, a->eta, S, uplo, -1, rank, rank);
486:   hmatrix_builder.set_low_rank_generator(compressor);
487:   hmatrix_builder.set_minimal_target_depth(a->depth[0]);
488:   hmatrix_builder.set_minimal_source_depth(a->depth[1]);
489:   PetscCheck(a->block_tree_consistency || (!a->block_tree_consistency && !(A->symmetric == PETSC_BOOL3_TRUE || A->hermitian == PETSC_BOOL3_TRUE)), PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot have a symmetric or Hermitian MatHtool with an inconsistent block tree");
490:   hmatrix_builder.set_block_tree_consistency(a->block_tree_consistency);
491:   a->distributed_operator_holder = std::make_unique<htool::DistributedOperatorFromHMatrix<PetscScalar>>(a->wrapper ? *a->wrapper : *generator, *a->target_cluster, *source_cluster, hmatrix_builder, PetscObjectComm((PetscObject)A));
492:   PetscFunctionReturn(PETSC_SUCCESS);
493: }

495: static PetscErrorCode MatProductNumeric_Htool(Mat C)
496: {
497:   Mat_Product       *product = C->product;
498:   Mat_Htool         *a;
499:   const PetscScalar *in;
500:   PetscScalar       *out;
501:   PetscInt           N, lda;

503:   PetscFunctionBegin;
504:   MatCheckProduct(C, 1);
505:   PetscCall(MatGetSize(C, nullptr, &N));
506:   PetscCall(MatDenseGetLDA(C, &lda));
507:   PetscCheck(lda == C->rmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Unsupported leading dimension (%" PetscInt_FMT " != %" PetscInt_FMT ")", lda, C->rmap->n);
508:   PetscCall(MatDenseGetArrayRead(product->B, &in));
509:   PetscCall(MatDenseGetArrayWrite(C, &out));
510:   PetscCall(MatShellGetContext(product->A, &a));
511:   switch (product->type) {
512:   case MATPRODUCT_AB:
513:     a->distributed_operator_holder->distributed_operator.matrix_product_local_to_local(in, out, N, nullptr);
514:     break;
515:   case MATPRODUCT_AtB:
516:     a->distributed_operator_holder->distributed_operator.matrix_product_transp_local_to_local(in, out, N, nullptr);
517:     break;
518:   default:
519:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "MatProductType %s is not supported", MatProductTypes[product->type]);
520:   }
521:   PetscCall(MatDenseRestoreArrayWrite(C, &out));
522:   PetscCall(MatDenseRestoreArrayRead(product->B, &in));
523:   PetscFunctionReturn(PETSC_SUCCESS);
524: }

526: static PetscErrorCode MatProductSymbolic_Htool(Mat C)
527: {
528:   Mat_Product *product = C->product;
529:   Mat          A, B;
530:   PetscBool    flg;

532:   PetscFunctionBegin;
533:   MatCheckProduct(C, 1);
534:   A = product->A;
535:   B = product->B;
536:   PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &flg, MATSEQDENSE, MATMPIDENSE, ""));
537:   PetscCheck(flg && (product->type == MATPRODUCT_AB || product->type == MATPRODUCT_AtB), PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "ProductType %s not supported for %s", MatProductTypes[product->type], ((PetscObject)product->B)->type_name);
538:   if (C->rmap->n == PETSC_DECIDE || C->cmap->n == PETSC_DECIDE || C->rmap->N == PETSC_DECIDE || C->cmap->N == PETSC_DECIDE) {
539:     if (product->type == MATPRODUCT_AB) PetscCall(MatSetSizes(C, A->rmap->n, B->cmap->n, A->rmap->N, B->cmap->N));
540:     else PetscCall(MatSetSizes(C, A->cmap->n, B->cmap->n, A->cmap->N, B->cmap->N));
541:   }
542:   PetscCall(MatSetType(C, MATDENSE));
543:   PetscCall(MatSetUp(C));
544:   PetscCall(MatSetOption(C, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
545:   PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
546:   PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
547:   C->ops->productsymbolic = nullptr;
548:   C->ops->productnumeric  = MatProductNumeric_Htool;
549:   PetscFunctionReturn(PETSC_SUCCESS);
550: }

552: static PetscErrorCode MatProductSetFromOptions_Htool(Mat C)
553: {
554:   PetscFunctionBegin;
555:   MatCheckProduct(C, 1);
556:   if (C->product->type == MATPRODUCT_AB || C->product->type == MATPRODUCT_AtB) C->ops->productsymbolic = MatProductSymbolic_Htool;
557:   PetscFunctionReturn(PETSC_SUCCESS);
558: }

560: static PetscErrorCode MatHtoolGetHierarchicalMat_Htool(Mat A, const htool::DistributedOperator<PetscScalar> **distributed_operator)
561: {
562:   Mat_Htool *a;

564:   PetscFunctionBegin;
565:   PetscCall(MatShellGetContext(A, &a));
566:   *distributed_operator = &a->distributed_operator_holder->distributed_operator;
567:   PetscFunctionReturn(PETSC_SUCCESS);
568: }

570: /*@C
571:   MatHtoolGetHierarchicalMat - Retrieves the opaque pointer to a Htool virtual matrix stored in a `MATHTOOL`.

573:   No Fortran Support, No C Support

575:   Input Parameter:
576: . A - hierarchical matrix

578:   Output Parameter:
579: . distributed_operator - opaque pointer to a Htool virtual matrix
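
  Example Usage:
  A minimal C++ sketch (assuming `A` is an already assembled `MATHTOOL`); the returned pointer refers to data owned by `A` and must not be freed by the caller:
.vb
  const htool::DistributedOperator<PetscScalar> *op;

  PetscCall(MatHtoolGetHierarchicalMat(A, &op));
.ve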

581:   Level: advanced

583: .seealso: [](ch_matrices), `Mat`, `MATHTOOL`
584: @*/
585: PETSC_EXTERN PetscErrorCode MatHtoolGetHierarchicalMat(Mat A, const htool::DistributedOperator<PetscScalar> **distributed_operator)
586: {
587:   PetscFunctionBegin;
589:   PetscAssertPointer(distributed_operator, 2);
590:   PetscTryMethod(A, "MatHtoolGetHierarchicalMat_C", (Mat, const htool::DistributedOperator<PetscScalar> **), (A, distributed_operator));
591:   PetscFunctionReturn(PETSC_SUCCESS);
592: }

594: static PetscErrorCode MatHtoolSetKernel_Htool(Mat A, MatHtoolKernelFn *kernel, void *kernelctx)
595: {
596:   Mat_Htool *a;

598:   PetscFunctionBegin;
599:   PetscCall(MatShellGetContext(A, &a));
600:   a->kernel    = kernel;
601:   a->kernelctx = kernelctx;
602:   delete a->wrapper;
603:   if (a->kernel) a->wrapper = new WrapperHtool(a->dim, a->kernel, a->kernelctx);
604:   PetscFunctionReturn(PETSC_SUCCESS);
605: }

607: /*@C
608:   MatHtoolSetKernel - Sets the kernel and context used for the assembly of a `MATHTOOL`.

610:   Collective, No Fortran Support

612:   Input Parameters:
613: + A         - hierarchical matrix
614: . kernel    - computational kernel (or `NULL`)
615: - kernelctx - kernel context (if kernel is `NULL`, the pointer must be of type htool::VirtualGenerator<PetscScalar>*)
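
  Example Usage:
  A hedged sketch of a user kernel matching the `MatHtoolKernelFn` signature; the name `GenEntries` and the `coords` array are illustrative, and the block is written column-major with leading dimension `M`, consistent with `GenEntriesTranspose()` above:
.vb
  static PetscErrorCode GenEntries(PetscInt sdim, PetscInt M, PetscInt N, const PetscInt *rows, const PetscInt *cols, PetscScalar *ptr, void *ctx)
  {
    PetscReal *coords = (PetscReal *)ctx; /* hypothetical user array of sdim-dimensional point coordinates */

    PetscFunctionBeginUser;
    for (PetscInt j = 0; j < N; ++j) {
      for (PetscInt i = 0; i < M; ++i) {
        PetscReal d = 0.0;
        for (PetscInt k = 0; k < sdim; ++k) d += PetscSqr(coords[sdim * rows[i] + k] - coords[sdim * cols[j] + k]);
        ptr[i + j * M] = 1.0 / (1.0 + PetscSqrtReal(d)); /* column-major M x N block, entry (rows[i], cols[j]) */
      }
    }
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(MatHtoolSetKernel(A, GenEntries, coords));
.ve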

617:   Level: advanced

619: .seealso: [](ch_matrices), `Mat`, `MATHTOOL`, `MatCreateHtoolFromKernel()`
620: @*/
621: PetscErrorCode MatHtoolSetKernel(Mat A, MatHtoolKernelFn *kernel, void *kernelctx)
622: {
623:   PetscFunctionBegin;
626:   if (!kernel) PetscAssertPointer(kernelctx, 3);
627:   PetscTryMethod(A, "MatHtoolSetKernel_C", (Mat, MatHtoolKernelFn *, void *), (A, kernel, kernelctx));
628:   PetscFunctionReturn(PETSC_SUCCESS);
629: }

631: static PetscErrorCode MatHtoolGetPermutationSource_Htool(Mat A, IS *is)
632: {
633:   Mat_Htool                       *a;
634:   PetscMPIInt                      rank;
635:   const std::vector<PetscInt>     *source;
636:   const htool::Cluster<PetscReal> *local_source_cluster;

638:   PetscFunctionBegin;
639:   PetscCall(MatShellGetContext(A, &a));
640:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
641:   local_source_cluster = a->source_cluster ? &a->source_cluster->get_cluster_on_partition(rank) : &a->target_cluster->get_cluster_on_partition(rank);
642:   source               = &local_source_cluster->get_permutation();
643:   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)A), local_source_cluster->get_size(), source->data() + local_source_cluster->get_offset(), PETSC_COPY_VALUES, is));
644:   PetscCall(ISSetPermutation(*is));
645:   PetscFunctionReturn(PETSC_SUCCESS);
646: }

648: /*@
649:   MatHtoolGetPermutationSource - Gets the permutation associated with the source cluster for a `MATHTOOL` matrix.

651:   Input Parameter:
652: . A - hierarchical matrix

654:   Output Parameter:
655: . is - permutation
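
  Example Usage:
  A brief sketch; since the implementation above creates the `IS` with `PETSC_COPY_VALUES`, the caller is responsible for destroying it:
.vb
  IS perm;

  PetscCall(MatHtoolGetPermutationSource(A, &perm));
  PetscCall(ISView(perm, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(ISDestroy(&perm));
.ve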

657:   Level: advanced

659: .seealso: [](ch_matrices), `Mat`, `MATHTOOL`, `MatHtoolGetPermutationTarget()`, `MatHtoolUsePermutation()`
660: @*/
661: PetscErrorCode MatHtoolGetPermutationSource(Mat A, IS *is)
662: {
663:   PetscFunctionBegin;
665:   if (!is) PetscAssertPointer(is, 2);
666:   PetscTryMethod(A, "MatHtoolGetPermutationSource_C", (Mat, IS *), (A, is));
667:   PetscFunctionReturn(PETSC_SUCCESS);
668: }

670: static PetscErrorCode MatHtoolGetPermutationTarget_Htool(Mat A, IS *is)
671: {
672:   Mat_Htool                   *a;
673:   const std::vector<PetscInt> *target;
674:   PetscMPIInt                  rank;

676:   PetscFunctionBegin;
677:   PetscCall(MatShellGetContext(A, &a));
678:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
679:   target = &a->target_cluster->get_permutation();
680:   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)A), a->target_cluster->get_cluster_on_partition(rank).get_size(), target->data() + a->target_cluster->get_cluster_on_partition(rank).get_offset(), PETSC_COPY_VALUES, is));
681:   PetscCall(ISSetPermutation(*is));
682:   PetscFunctionReturn(PETSC_SUCCESS);
683: }

685: /*@
686:   MatHtoolGetPermutationTarget - Gets the permutation associated with the target cluster for a `MATHTOOL` matrix.

688:   Input Parameter:
689: . A - hierarchical matrix

691:   Output Parameter:
692: . is - permutation
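
  Example Usage:
  A brief sketch mirroring `MatHtoolGetPermutationSource()`; the caller destroys the returned `IS`:
.vb
  IS perm;

  PetscCall(MatHtoolGetPermutationTarget(A, &perm));
  PetscCall(ISDestroy(&perm));
.ve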

694:   Level: advanced

696: .seealso: [](ch_matrices), `Mat`, `MATHTOOL`, `MatHtoolGetPermutationSource()`, `MatHtoolUsePermutation()`
697: @*/
698: PetscErrorCode MatHtoolGetPermutationTarget(Mat A, IS *is)
699: {
700:   PetscFunctionBegin;
702:   if (!is) PetscAssertPointer(is, 2);
703:   PetscTryMethod(A, "MatHtoolGetPermutationTarget_C", (Mat, IS *), (A, is));
704:   PetscFunctionReturn(PETSC_SUCCESS);
705: }

707: static PetscErrorCode MatHtoolUsePermutation_Htool(Mat A, PetscBool use)
708: {
709:   Mat_Htool *a;

711:   PetscFunctionBegin;
712:   PetscCall(MatShellGetContext(A, &a));
713:   a->distributed_operator_holder->distributed_operator.use_permutation() = use;
714:   PetscFunctionReturn(PETSC_SUCCESS);
715: }

717: /*@
718:   MatHtoolUsePermutation - Sets whether a `MATHTOOL` matrix should permute input (resp. output) vectors following its internal source (resp. target) permutation.

720:   Input Parameters:
721: + A   - hierarchical matrix
722: - use - Boolean value
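
  Example Usage:
  A hedged sketch: with the permutation disabled, `MatMult()` is assumed to act on vectors that are already ordered according to the internal source/target permutations (see `MatHtoolGetPermutationSource()` and `MatHtoolGetPermutationTarget()`):
.vb
  PetscCall(MatHtoolUsePermutation(A, PETSC_FALSE));
  PetscCall(MatMult(A, x, y)); /* x and y are assumed to be in Htool's internal numbering */
  PetscCall(MatHtoolUsePermutation(A, PETSC_TRUE));
.ve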

724:   Level: advanced

726: .seealso: [](ch_matrices), `Mat`, `MATHTOOL`, `MatHtoolGetPermutationSource()`, `MatHtoolGetPermutationTarget()`
727: @*/
728: PetscErrorCode MatHtoolUsePermutation(Mat A, PetscBool use)
729: {
730:   PetscFunctionBegin;
733:   PetscTryMethod(A, "MatHtoolUsePermutation_C", (Mat, PetscBool), (A, use));
734:   PetscFunctionReturn(PETSC_SUCCESS);
735: }

737: static PetscErrorCode MatConvert_Htool_Dense(Mat A, MatType, MatReuse reuse, Mat *B)
738: {
739:   Mat          C;
740:   Mat_Htool   *a;
741:   PetscScalar *array, shift, scale;
742:   PetscInt     lda;

744:   PetscFunctionBegin;
745:   PetscCall(MatShellGetScalingShifts(A, &shift, &scale, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Mat *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED));
746:   PetscCall(MatShellGetContext(A, &a));
747:   if (reuse == MAT_REUSE_MATRIX) {
748:     C = *B;
749:     PetscCheck(C->rmap->n == A->rmap->n && C->cmap->N == A->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible dimensions");
750:     PetscCall(MatDenseGetLDA(C, &lda));
751:     PetscCheck(lda == C->rmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Unsupported leading dimension (%" PetscInt_FMT " != %" PetscInt_FMT ")", lda, C->rmap->n);
752:   } else {
753:     PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &C));
754:     PetscCall(MatSetSizes(C, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
755:     PetscCall(MatSetType(C, MATDENSE));
756:     PetscCall(MatSetUp(C));
757:     PetscCall(MatSetOption(C, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
758:   }
759:   PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
760:   PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
761:   PetscCall(MatDenseGetArrayWrite(C, &array));
762:   htool::copy_to_dense_in_user_numbering(a->distributed_operator_holder->hmatrix, array);
763:   PetscCall(MatDenseRestoreArrayWrite(C, &array));
764:   PetscCall(MatShift(C, shift));
765:   PetscCall(MatScale(C, scale));
766:   if (reuse == MAT_INPLACE_MATRIX) {
767:     PetscCall(MatHeaderReplace(A, &C));
768:   } else *B = C;
769:   PetscFunctionReturn(PETSC_SUCCESS);
770: }

772: static PetscErrorCode GenEntriesTranspose(PetscInt sdim, PetscInt M, PetscInt N, const PetscInt *rows, const PetscInt *cols, PetscScalar *ptr, void *ctx)
773: {
774:   MatHtoolKernelTranspose *generator = (MatHtoolKernelTranspose *)ctx;
775:   PetscScalar             *tmp;

777:   PetscFunctionBegin;
778:   PetscCall(generator->kernel(sdim, N, M, cols, rows, ptr, generator->kernelctx));
779:   PetscCall(PetscMalloc1(M * N, &tmp));
780:   PetscCall(PetscArraycpy(tmp, ptr, M * N));
781:   for (PetscInt i = 0; i < M; ++i) {
782:     for (PetscInt j = 0; j < N; ++j) ptr[i + j * M] = tmp[j + i * N];
783:   }
784:   PetscCall(PetscFree(tmp));
785:   PetscFunctionReturn(PETSC_SUCCESS);
786: }

788: /* naive implementation which keeps a reference to the original Mat */
789: static PetscErrorCode MatTranspose_Htool(Mat A, MatReuse reuse, Mat *B)
790: {
791:   Mat                      C;
792:   Mat_Htool               *a, *c;
793:   PetscScalar              shift, scale;
794:   PetscInt                 M = A->rmap->N, N = A->cmap->N, m = A->rmap->n, n = A->cmap->n;
795:   PetscContainer           container;
796:   MatHtoolKernelTranspose *kernelt;

798:   PetscFunctionBegin;
799:   PetscCall(MatShellGetScalingShifts(A, &shift, &scale, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Vec *)MAT_SHELL_NOT_ALLOWED, (Mat *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED, (IS *)MAT_SHELL_NOT_ALLOWED));
800:   PetscCall(MatShellGetContext(A, &a));
801:   if (reuse == MAT_REUSE_MATRIX) PetscCall(MatTransposeCheckNonzeroState_Private(A, *B));
802:   PetscCheck(reuse != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MatTranspose() with MAT_INPLACE_MATRIX not supported");
803:   if (reuse == MAT_INITIAL_MATRIX) {
804:     PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &C));
805:     PetscCall(MatSetSizes(C, n, m, N, M));
806:     PetscCall(MatSetType(C, ((PetscObject)A)->type_name));
807:     PetscCall(MatSetUp(C));
808:     PetscCall(PetscNew(&kernelt));
809:     PetscCall(PetscObjectContainerCompose((PetscObject)C, "KernelTranspose", kernelt, PetscCtxDestroyDefault));
810:   } else {
811:     C = *B;
812:     PetscCall(PetscObjectQuery((PetscObject)C, "KernelTranspose", (PetscObject *)&container));
813:     PetscCheck(container, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Must call MatTranspose() with MAT_INITIAL_MATRIX first");
814:     PetscCall(PetscContainerGetPointer(container, (void **)&kernelt));
815:   }
816:   PetscCall(MatShellGetContext(C, &c));
817:   c->dim = a->dim;
818:   PetscCall(MatShift(C, shift));
819:   PetscCall(MatScale(C, scale));
820:   c->kernel = GenEntriesTranspose;
821:   if (kernelt->A != A) {
822:     PetscCall(MatDestroy(&kernelt->A));
823:     kernelt->A = A;
824:     PetscCall(PetscObjectReference((PetscObject)A));
825:   }
826:   kernelt->kernel    = a->kernel;
827:   kernelt->kernelctx = a->kernelctx;
828:   c->kernelctx       = kernelt;
829:   if (reuse == MAT_INITIAL_MATRIX) {
830:     PetscCall(PetscMalloc1(N * c->dim, &c->gcoords_target));
831:     PetscCall(PetscArraycpy(c->gcoords_target, a->gcoords_source, N * c->dim));
832:     if (a->gcoords_target != a->gcoords_source) {
833:       PetscCall(PetscMalloc1(M * c->dim, &c->gcoords_source));
834:       PetscCall(PetscArraycpy(c->gcoords_source, a->gcoords_target, M * c->dim));
835:     } else c->gcoords_source = c->gcoords_target;
836:     PetscCall(PetscCalloc2(M, &c->work_source, N, &c->work_target));
837:   }
838:   PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
839:   PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
840:   if (reuse == MAT_INITIAL_MATRIX) *B = C;
841:   PetscFunctionReturn(PETSC_SUCCESS);
842: }

844: static PetscErrorCode MatDestroy_Factor(Mat F)
845: {
846:   PetscContainer               container;
847:   htool::HMatrix<PetscScalar> *A;

849:   PetscFunctionBegin;
850:   PetscCall(PetscObjectQuery((PetscObject)F, "HMatrix", (PetscObject *)&container));
851:   if (container) {
852:     PetscCall(PetscContainerGetPointer(container, (void **)&A));
853:     delete A;
854:     PetscCall(PetscObjectCompose((PetscObject)F, "HMatrix", nullptr));
855:   }
856:   PetscCall(PetscObjectComposeFunction((PetscObject)F, "MatFactorGetSolverType_C", nullptr));
857:   PetscFunctionReturn(PETSC_SUCCESS);
858: }

860: static PetscErrorCode MatFactorGetSolverType_Htool(Mat, MatSolverType *type)
861: {
862:   PetscFunctionBegin;
863:   *type = MATSOLVERHTOOL;
864:   PetscFunctionReturn(PETSC_SUCCESS);
865: }

867: template <char trans>
868: static inline PetscErrorCode MatSolve_Private(Mat A, htool::Matrix<PetscScalar> &X)
869: {
870:   PetscContainer               container;
871:   htool::HMatrix<PetscScalar> *B;

873:   PetscFunctionBegin;
874:   PetscCheck(A->factortype == MAT_FACTOR_LU || A->factortype == MAT_FACTOR_CHOLESKY, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_UNKNOWN_TYPE, "Only MAT_LU_FACTOR and MAT_CHOLESKY_FACTOR are supported");
875:   PetscCall(PetscObjectQuery((PetscObject)A, "HMatrix", (PetscObject *)&container));
876:   PetscCheck(container, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Must call Mat%sFactorNumeric() before Mat%sSolve%s()", A->factortype == MAT_FACTOR_LU ? "LU" : "Cholesky", X.nb_cols() == 1 ? "" : "Mat", trans == 'N' ? "" : "Transpose");
877:   PetscCall(PetscContainerGetPointer(container, (void **)&B));
878:   if (A->factortype == MAT_FACTOR_LU) htool::lu_solve(trans, *B, X);
879:   else htool::cholesky_solve('L', *B, X);
880:   PetscFunctionReturn(PETSC_SUCCESS);
881: }

883: template <char trans, class Type, typename std::enable_if<std::is_same<Type, Vec>::value>::type * = nullptr>
884: static PetscErrorCode MatSolve_Htool(Mat A, Type b, Type x)
885: {
886:   PetscInt                   n;
887:   htool::Matrix<PetscScalar> v;
888:   PetscScalar               *array;

890:   PetscFunctionBegin;
891:   PetscCall(VecGetLocalSize(b, &n));
892:   PetscCall(VecCopy(b, x));
893:   PetscCall(VecGetArrayWrite(x, &array));
894:   v.assign(n, 1, array, false);
895:   PetscCall(VecRestoreArrayWrite(x, &array));
896:   PetscCall(MatSolve_Private<trans>(A, v));
897:   PetscFunctionReturn(PETSC_SUCCESS);
898: }

900: template <char trans, class Type, typename std::enable_if<std::is_same<Type, Mat>::value>::type * = nullptr>
901: static PetscErrorCode MatSolve_Htool(Mat A, Type B, Type X)
902: {
903:   PetscInt                   m, N;
904:   htool::Matrix<PetscScalar> v;
905:   PetscScalar               *array;

907:   PetscFunctionBegin;
908:   PetscCall(MatGetLocalSize(B, &m, nullptr));
909:   PetscCall(MatGetLocalSize(B, nullptr, &N));
910:   PetscCall(MatCopy(B, X, SAME_NONZERO_PATTERN));
911:   PetscCall(MatDenseGetArrayWrite(X, &array));
912:   v.assign(m, N, array, false);
913:   PetscCall(MatDenseRestoreArrayWrite(X, &array));
914:   PetscCall(MatSolve_Private<trans>(A, v));
915:   PetscFunctionReturn(PETSC_SUCCESS);
916: }

918: template <MatFactorType ftype>
919: static PetscErrorCode MatFactorNumeric_Htool(Mat F, Mat A, const MatFactorInfo *)
920: {
921:   Mat_Htool                   *a;
922:   htool::HMatrix<PetscScalar> *B;

924:   PetscFunctionBegin;
925:   PetscCall(MatShellGetContext(A, &a));
926:   B = new htool::HMatrix<PetscScalar>(a->distributed_operator_holder->hmatrix);
927:   if (ftype == MAT_FACTOR_LU) htool::lu_factorization(*B);
928:   else htool::cholesky_factorization('L', *B);
929:   PetscCall(PetscObjectContainerCompose((PetscObject)F, "HMatrix", B, nullptr));
930:   PetscFunctionReturn(PETSC_SUCCESS);
931: }

933: template <MatFactorType ftype>
934: PetscErrorCode MatFactorSymbolic_Htool(Mat F, Mat)
935: {
936:   PetscFunctionBegin;
937:   F->preallocated  = PETSC_TRUE;
938:   F->assembled     = PETSC_TRUE;
939:   F->ops->solve    = MatSolve_Htool<'N', Vec>;
940:   F->ops->matsolve = MatSolve_Htool<'N', Mat>;
941:   if (!PetscDefined(USE_COMPLEX) || ftype == MAT_FACTOR_LU) {
942:     F->ops->solvetranspose    = MatSolve_Htool<'T', Vec>;
943:     F->ops->matsolvetranspose = MatSolve_Htool<'T', Mat>;
944:   }
945:   F->ops->destroy = MatDestroy_Factor;
946:   if (ftype == MAT_FACTOR_LU) F->ops->lufactornumeric = MatFactorNumeric_Htool<MAT_FACTOR_LU>;
947:   else F->ops->choleskyfactornumeric = MatFactorNumeric_Htool<MAT_FACTOR_CHOLESKY>;
948:   PetscFunctionReturn(PETSC_SUCCESS);
949: }

951: static PetscErrorCode MatLUFactorSymbolic_Htool(Mat F, Mat A, IS, IS, const MatFactorInfo *)
952: {
953:   PetscFunctionBegin;
954:   PetscCall(MatFactorSymbolic_Htool<MAT_FACTOR_LU>(F, A));
955:   PetscFunctionReturn(PETSC_SUCCESS);
956: }

958: static PetscErrorCode MatCholeskyFactorSymbolic_Htool(Mat F, Mat A, IS, const MatFactorInfo *)
959: {
960:   PetscFunctionBegin;
961:   PetscCall(MatFactorSymbolic_Htool<MAT_FACTOR_CHOLESKY>(F, A));
962:   PetscFunctionReturn(PETSC_SUCCESS);
963: }

965: static PetscErrorCode MatGetFactor_htool_htool(Mat A, MatFactorType ftype, Mat *F)
966: {
967:   Mat         B;
968:   Mat_Htool  *a;
969:   PetscMPIInt size;

971:   PetscFunctionBegin;
972:   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
973:   PetscCall(MatShellGetContext(A, &a));
974:   PetscCheck(size == 1, PetscObjectComm((PetscObject)A), PETSC_ERR_WRONG_MPI_SIZE, "Unsupported parallel MatGetFactor()");
975:   PetscCheck(a->block_tree_consistency, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot factor a MatHtool with inconsistent block tree");
976:   PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
977:   PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
978:   PetscCall(PetscStrallocpy(MATSOLVERHTOOL, &((PetscObject)B)->type_name));
979:   PetscCall(MatSetUp(B));

981:   B->ops->getinfo    = MatGetInfo_External;
982:   B->factortype      = ftype;
983:   B->trivialsymbolic = PETSC_TRUE;

985:   if (ftype == MAT_FACTOR_LU) B->ops->lufactorsymbolic = MatLUFactorSymbolic_Htool;
986:   else if (ftype == MAT_FACTOR_CHOLESKY) B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_Htool;

988:   PetscCall(PetscFree(B->solvertype));
989:   PetscCall(PetscStrallocpy(MATSOLVERHTOOL, &B->solvertype));

991:   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_Htool));
992:   *F = B;
993:   PetscFunctionReturn(PETSC_SUCCESS);
994: }

996: PETSC_INTERN PetscErrorCode MatSolverTypeRegister_Htool(void)
997: {
998:   PetscFunctionBegin;
999:   PetscCall(MatSolverTypeRegister(MATSOLVERHTOOL, MATHTOOL, MAT_FACTOR_LU, MatGetFactor_htool_htool));
1000:   PetscCall(MatSolverTypeRegister(MATSOLVERHTOOL, MATHTOOL, MAT_FACTOR_CHOLESKY, MatGetFactor_htool_htool));
1001:   PetscFunctionReturn(PETSC_SUCCESS);
1002: }

1004: /*@C
1005:   MatCreateHtoolFromKernel - Creates a `MATHTOOL` from a user-supplied kernel.

1007:   Collective, No Fortran Support

1009:   Input Parameters:
1010: + comm          - MPI communicator
1011: . m             - number of local rows (or `PETSC_DECIDE` to have it calculated if `M` is given)
1012: . n             - number of local columns (or `PETSC_DECIDE` to have it calculated if `N` is given)
1013: . M             - number of global rows (or `PETSC_DETERMINE` to have it calculated if `m` is given)
1014: . N             - number of global columns (or `PETSC_DETERMINE` to have it calculated if `n` is given)
1015: . spacedim      - dimension of the space coordinates
1016: . coords_target - coordinates of the target
1017: . coords_source - coordinates of the source
1018: . kernel        - computational kernel (or `NULL`)
1019: - kernelctx     - kernel context (if kernel is `NULL`, the pointer must be of type htool::VirtualGenerator<PetscScalar>*)

1021:   Output Parameter:
1022: . B - matrix

1024:   Options Database Keys:
1025: + -mat_htool_min_cluster_size <`PetscInt`>                                                     - minimal leaf size in cluster tree
1026: . -mat_htool_epsilon <`PetscReal`>                                                             - relative error in Frobenius norm when approximating a block
1027: . -mat_htool_eta <`PetscReal`>                                                                 - admissibility condition tolerance
1028: . -mat_htool_min_target_depth <`PetscInt`>                                                     - minimal cluster tree depth associated with the rows
1029: . -mat_htool_min_source_depth <`PetscInt`>                                                     - minimal cluster tree depth associated with the columns
1030: . -mat_htool_block_tree_consistency <`PetscBool`>                                              - block tree consistency
1031: . -mat_htool_compressor <sympartialACA, fullACA, SVD>                                          - type of compression
1032: - -mat_htool_clustering <PCARegular, PCAGeometric, BoundingBox1Regular, BoundingBox1Geometric> - type of clustering
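
  Example Usage:
  A minimal sketch, assuming a kernel `GenEntries()` with the `MatHtoolKernelFn` signature (see the sketch in `MatHtoolSetKernel()`) and an array `coords` holding the `spacedim`-dimensional coordinates of the `n` points owned by this process:
.vb
  Mat A;

  PetscCall(MatCreateHtoolFromKernel(PETSC_COMM_WORLD, n, n, PETSC_DETERMINE, PETSC_DETERMINE, spacedim, coords, coords, GenEntries, coords, &A));
  PetscCall(MatSetOption(A, MAT_SYMMETRIC, PETSC_TRUE)); /* optional hint, only valid if the kernel is symmetric */
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY)); /* the hierarchical compression happens here */
.ve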

1034:   Level: intermediate

1036: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MATHTOOL`, `PCSetCoordinates()`, `MatHtoolSetKernel()`, `MatHtoolCompressorType`, `MATH2OPUS`, `MatCreateH2OpusFromKernel()`
1037: @*/
1038: PetscErrorCode MatCreateHtoolFromKernel(MPI_Comm comm, PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt spacedim, const PetscReal coords_target[], const PetscReal coords_source[], MatHtoolKernelFn *kernel, void *kernelctx, Mat *B)
1039: {
1040:   Mat        A;
1041:   Mat_Htool *a;

1043:   PetscFunctionBegin;
1044:   PetscCall(MatCreate(comm, &A));
1046:   PetscAssertPointer(coords_target, 7);
1047:   PetscAssertPointer(coords_source, 8);
1049:   if (!kernel) PetscAssertPointer(kernelctx, 10);
1050:   PetscCall(MatSetSizes(A, m, n, M, N));
1051:   PetscCall(MatSetType(A, MATHTOOL));
1052:   PetscCall(MatSetUp(A));
1053:   PetscCall(MatShellGetContext(A, &a));
1054:   a->dim       = spacedim;
1055:   a->kernel    = kernel;
1056:   a->kernelctx = kernelctx;
1057:   PetscCall(PetscCalloc1(A->rmap->N * spacedim, &a->gcoords_target));
1058:   PetscCall(PetscArraycpy(a->gcoords_target + A->rmap->rstart * spacedim, coords_target, A->rmap->n * spacedim));
1059:   PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, a->gcoords_target, A->rmap->N * spacedim, MPIU_REAL, MPI_SUM, PetscObjectComm((PetscObject)A))); /* global target coordinates */
1060:   if (coords_target != coords_source) {
1061:     PetscCall(PetscCalloc1(A->cmap->N * spacedim, &a->gcoords_source));
1062:     PetscCall(PetscArraycpy(a->gcoords_source + A->cmap->rstart * spacedim, coords_source, A->cmap->n * spacedim));
1063:     PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, a->gcoords_source, A->cmap->N * spacedim, MPIU_REAL, MPI_SUM, PetscObjectComm((PetscObject)A))); /* global source coordinates */
1064:   } else a->gcoords_source = a->gcoords_target;
1065:   PetscCall(PetscCalloc2(A->cmap->N, &a->work_source, A->rmap->N, &a->work_target));
1066:   *B = A;
1067:   PetscFunctionReturn(PETSC_SUCCESS);
1068: }

1070: /*MC
1071:      MATHTOOL = "htool" - A matrix type for hierarchical matrices using the Htool package.

1073:   Use `./configure --download-htool` to build PETSc with Htool support.

1075:    Options Database Key:
1076: .     -mat_type htool - sets the matrix type to `MATHTOOL` during a call to `MatSetFromOptions()`

1078:    Level: beginner

1080: .seealso: [](ch_matrices), `Mat`, `MATH2OPUS`, `MATDENSE`, `MatCreateHtoolFromKernel()`, `MatHtoolSetKernel()`
1081: M*/
1082: PETSC_EXTERN PetscErrorCode MatCreate_Htool(Mat A)
1083: {
1084:   Mat_Htool *a;

1086:   PetscFunctionBegin;
1087:   PetscCall(MatSetType(A, MATSHELL));
1088:   PetscCall(PetscNew(&a));
1089:   PetscCall(MatShellSetContext(A, a));
1090:   PetscCall(MatShellSetOperation(A, MATOP_GET_DIAGONAL, (void (*)(void))MatGetDiagonal_Htool));
1091:   PetscCall(MatShellSetOperation(A, MATOP_GET_DIAGONAL_BLOCK, (void (*)(void))MatGetDiagonalBlock_Htool));
1092:   PetscCall(MatShellSetOperation(A, MATOP_MULT, (void (*)(void))MatMult_Htool));
1093:   PetscCall(MatShellSetOperation(A, MATOP_MULT_TRANSPOSE, (void (*)(void))MatMultTranspose_Htool));
1094:   if (!PetscDefined(USE_COMPLEX)) PetscCall(MatShellSetOperation(A, MATOP_MULT_HERMITIAN_TRANSPOSE, (void (*)(void))MatMultTranspose_Htool));
1095:   A->ops->increaseoverlap   = MatIncreaseOverlap_Htool;
1096:   A->ops->createsubmatrices = MatCreateSubMatrices_Htool;
1097:   PetscCall(MatShellSetOperation(A, MATOP_VIEW, (void (*)(void))MatView_Htool));
1098:   PetscCall(MatShellSetOperation(A, MATOP_SET_FROM_OPTIONS, (void (*)(void))MatSetFromOptions_Htool));
1099:   PetscCall(MatShellSetOperation(A, MATOP_GET_ROW, (void (*)(void))MatGetRow_Htool));
1100:   PetscCall(MatShellSetOperation(A, MATOP_RESTORE_ROW, (void (*)(void))MatRestoreRow_Htool));
1101:   PetscCall(MatShellSetOperation(A, MATOP_ASSEMBLY_END, (void (*)(void))MatAssemblyEnd_Htool));
1102:   PetscCall(MatShellSetOperation(A, MATOP_TRANSPOSE, (void (*)(void))MatTranspose_Htool));
1103:   PetscCall(MatShellSetOperation(A, MATOP_DESTROY, (void (*)(void))MatDestroy_Htool));
1104:   a->dim                    = 0;
1105:   a->gcoords_target         = nullptr;
1106:   a->gcoords_source         = nullptr;
1107:   a->min_cluster_size       = 10;
1108:   a->epsilon                = PetscSqrtReal(PETSC_SMALL);
1109:   a->eta                    = 10.0;
1110:   a->depth[0]               = 0;
1111:   a->depth[1]               = 0;
1112:   a->block_tree_consistency = PETSC_TRUE;
1113:   a->compressor             = MAT_HTOOL_COMPRESSOR_SYMPARTIAL_ACA;
1114:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_htool_seqdense_C", MatProductSetFromOptions_Htool));
1115:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_htool_mpidense_C", MatProductSetFromOptions_Htool));
1116:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_htool_seqdense_C", MatConvert_Htool_Dense));
1117:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_htool_mpidense_C", MatConvert_Htool_Dense));
1118:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetHierarchicalMat_C", MatHtoolGetHierarchicalMat_Htool));
1119:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolSetKernel_C", MatHtoolSetKernel_Htool));
1120:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetPermutationSource_C", MatHtoolGetPermutationSource_Htool));
1121:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolGetPermutationTarget_C", MatHtoolGetPermutationTarget_Htool));
1122:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHtoolUsePermutation_C", MatHtoolUsePermutation_Htool));
1123:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatShellSetContext_C", MatShellSetContext_Immutable));
1124:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatShellSetContextDestroy_C", MatShellSetContextDestroy_Immutable));
1125:   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatShellSetManageScalingShifts_C", MatShellSetManageScalingShifts_Immutable));
1126:   PetscCall(PetscObjectChangeTypeName((PetscObject)A, MATHTOOL));
1127:   PetscFunctionReturn(PETSC_SUCCESS);
1128: }