Actual source code: matutil.c

slepc-3.19.0 2023-03-31
/*
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
   SLEPc - Scalable Library for Eigenvalue Problem Computations
   Copyright (c) 2002-, Universitat Politecnica de Valencia, Spain

   This file is part of SLEPc.
   SLEPc is distributed under a 2-clause BSD license (see LICENSE).
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
*/

#include <slepc/private/slepcimpl.h>

static PetscErrorCode MatCreateTile_Seq(PetscScalar a,Mat A,PetscScalar b,Mat B,PetscScalar c,Mat C,PetscScalar d,Mat D,Mat G)
{
  PetscInt          i,j,M1,M2,N1,N2,*nnz,ncols,*scols,bs;
  PetscScalar       *svals,*buf;
  const PetscInt    *cols;
  const PetscScalar *vals;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A,&M1,&N1));
  PetscCall(MatGetSize(D,&M2,&N2));
  PetscCall(MatGetBlockSize(A,&bs));

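  /* The tiled matrix is built in two passes: the loops below first count the
     nonzeros of each (block) row of G so that it can be preallocated, then the
     second set of loops inserts the scaled values, shifting column indices by
     N1 for the right blocks (B,D) and row indices by M1 for the bottom blocks (C,D) */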
  PetscCall(PetscCalloc1((M1+M2)/bs,&nnz));
  /* Preallocate for A */
  if (a!=0.0) {
    for (i=0;i<(M1+bs-1)/bs;i++) {
      PetscCall(MatGetRow(A,i*bs,&ncols,NULL,NULL));
      nnz[i] += ncols/bs;
      PetscCall(MatRestoreRow(A,i*bs,&ncols,NULL,NULL));
    }
  }
  /* Preallocate for B */
  if (b!=0.0) {
    for (i=0;i<(M1+bs-1)/bs;i++) {
      PetscCall(MatGetRow(B,i*bs,&ncols,NULL,NULL));
      nnz[i] += ncols/bs;
      PetscCall(MatRestoreRow(B,i*bs,&ncols,NULL,NULL));
    }
  }
  /* Preallocate for C */
  if (c!=0.0) {
    for (i=0;i<(M2+bs-1)/bs;i++) {
      PetscCall(MatGetRow(C,i*bs,&ncols,NULL,NULL));
      nnz[i+M1/bs] += ncols/bs;
      PetscCall(MatRestoreRow(C,i*bs,&ncols,NULL,NULL));
    }
  }
  /* Preallocate for D */
  if (d!=0.0) {
    for (i=0;i<(M2+bs-1)/bs;i++) {
      PetscCall(MatGetRow(D,i*bs,&ncols,NULL,NULL));
      nnz[i+M1/bs] += ncols/bs;
      PetscCall(MatRestoreRow(D,i*bs,&ncols,NULL,NULL));
    }
  }
  PetscCall(MatXAIJSetPreallocation(G,bs,nnz,NULL,NULL,NULL));
  PetscCall(PetscFree(nnz));

  PetscCall(PetscMalloc2(PetscMax(N1,N2),&buf,PetscMax(N1,N2),&scols));
  /* Transfer A */
  if (a!=0.0) {
    for (i=0;i<M1;i++) {
      PetscCall(MatGetRow(A,i,&ncols,&cols,&vals));
      if (a!=1.0) {
        svals=buf;
        for (j=0;j<ncols;j++) svals[j] = vals[j]*a;
      } else svals=(PetscScalar*)vals;
      PetscCall(MatSetValues(G,1,&i,ncols,cols,svals,INSERT_VALUES));
      PetscCall(MatRestoreRow(A,i,&ncols,&cols,&vals));
    }
  }
  /* Transfer B */
  if (b!=0.0) {
    for (i=0;i<M1;i++) {
      PetscCall(MatGetRow(B,i,&ncols,&cols,&vals));
      if (b!=1.0) {
        svals=buf;
        for (j=0;j<ncols;j++) svals[j] = vals[j]*b;
      } else svals=(PetscScalar*)vals;
      for (j=0;j<ncols;j++) scols[j] = cols[j]+N1;
      PetscCall(MatSetValues(G,1,&i,ncols,scols,svals,INSERT_VALUES));
      PetscCall(MatRestoreRow(B,i,&ncols,&cols,&vals));
    }
  }
  /* Transfer C */
  if (c!=0.0) {
    for (i=0;i<M2;i++) {
      PetscCall(MatGetRow(C,i,&ncols,&cols,&vals));
      if (c!=1.0) {
        svals=buf;
        for (j=0;j<ncols;j++) svals[j] = vals[j]*c;
      } else svals=(PetscScalar*)vals;
      j = i+M1;
      PetscCall(MatSetValues(G,1,&j,ncols,cols,svals,INSERT_VALUES));
      PetscCall(MatRestoreRow(C,i,&ncols,&cols,&vals));
    }
  }
  /* Transfer D */
  if (d!=0.0) {
    for (i=0;i<M2;i++) {
      PetscCall(MatGetRow(D,i,&ncols,&cols,&vals));
      if (d!=1.0) {
        svals=buf;
        for (j=0;j<ncols;j++) svals[j] = vals[j]*d;
      } else svals=(PetscScalar*)vals;
      for (j=0;j<ncols;j++) scols[j] = cols[j]+N1;
      j = i+M1;
      PetscCall(MatSetValues(G,1,&j,ncols,scols,svals,INSERT_VALUES));
      PetscCall(MatRestoreRow(D,i,&ncols,&cols,&vals));
    }
  }
  PetscCall(PetscFree2(buf,scols));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatCreateTile_MPI(PetscScalar a,Mat A,PetscScalar b,Mat B,PetscScalar c,Mat C,PetscScalar d,Mat D,Mat G)
{
  PetscMPIInt       np;
  PetscInt          p,i,j,N1,N2,m1,m2,n1,n2,*map1,*map2;
  PetscInt          *dnz,*onz,ncols,*scols,start,gstart;
  PetscScalar       *svals,*buf;
  const PetscInt    *cols,*mapptr1,*mapptr2;
  const PetscScalar *vals;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A,NULL,&N1));
  PetscCall(MatGetLocalSize(A,&m1,&n1));
  PetscCall(MatGetSize(D,NULL,&N2));
  PetscCall(MatGetLocalSize(D,&m2,&n2));

  /* Create mappings */
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)G),&np));
  PetscCall(MatGetOwnershipRangesColumn(A,&mapptr1));
  PetscCall(MatGetOwnershipRangesColumn(B,&mapptr2));
  PetscCall(PetscMalloc4(PetscMax(N1,N2),&buf,PetscMax(N1,N2),&scols,N1,&map1,N2,&map2));
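  /* map1[i] is the global column of G that holds column i of A (and C), and
     map2[i] the one that holds column i of B (and D); shifting by the other
     block's ownership offsets yields the perfect-shuffle ordering described
     in the notes of MatCreateTile() */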
  for (p=0;p<np;p++) {
    for (i=mapptr1[p];i<mapptr1[p+1];i++) map1[i] = i+mapptr2[p];
  }
  for (p=0;p<np;p++) {
    for (i=mapptr2[p];i<mapptr2[p+1];i++) map2[i] = i+mapptr1[p+1];
  }

  MatPreallocateBegin(PetscObjectComm((PetscObject)G),m1+m2,n1+n2,dnz,onz);
  PetscCall(MatGetOwnershipRange(G,&gstart,NULL));
  /* Preallocate for A */
  if (a!=0.0) {
    PetscCall(MatGetOwnershipRange(A,&start,NULL));
    for (i=0;i<m1;i++) {
      PetscCall(MatGetRow(A,i+start,&ncols,&cols,NULL));
      for (j=0;j<ncols;j++) scols[j] = map1[cols[j]];
      PetscCall(MatPreallocateSet(gstart+i,ncols,scols,dnz,onz));
      PetscCall(MatRestoreRow(A,i+start,&ncols,&cols,NULL));
    }
  }
  /* Preallocate for B */
  if (b!=0.0) {
    PetscCall(MatGetOwnershipRange(B,&start,NULL));
    for (i=0;i<m1;i++) {
      PetscCall(MatGetRow(B,i+start,&ncols,&cols,NULL));
      for (j=0;j<ncols;j++) scols[j] = map2[cols[j]];
      PetscCall(MatPreallocateSet(gstart+i,ncols,scols,dnz,onz));
      PetscCall(MatRestoreRow(B,i+start,&ncols,&cols,NULL));
    }
  }
  /* Preallocate for C */
  if (c!=0.0) {
    PetscCall(MatGetOwnershipRange(C,&start,NULL));
    for (i=0;i<m2;i++) {
      PetscCall(MatGetRow(C,i+start,&ncols,&cols,NULL));
      for (j=0;j<ncols;j++) scols[j] = map1[cols[j]];
      PetscCall(MatPreallocateSet(gstart+m1+i,ncols,scols,dnz,onz));
      PetscCall(MatRestoreRow(C,i+start,&ncols,&cols,NULL));
    }
  }
  /* Preallocate for D */
  if (d!=0.0) {
    PetscCall(MatGetOwnershipRange(D,&start,NULL));
    for (i=0;i<m2;i++) {
      PetscCall(MatGetRow(D,i+start,&ncols,&cols,NULL));
      for (j=0;j<ncols;j++) scols[j] = map2[cols[j]];
      PetscCall(MatPreallocateSet(gstart+m1+i,ncols,scols,dnz,onz));
      PetscCall(MatRestoreRow(D,i+start,&ncols,&cols,NULL));
    }
  }
  PetscCall(MatMPIAIJSetPreallocation(G,0,dnz,0,onz));
  MatPreallocateEnd(dnz,onz);

  /* Transfer A */
  if (a!=0.0) {
    PetscCall(MatGetOwnershipRange(A,&start,NULL));
    for (i=0;i<m1;i++) {
      PetscCall(MatGetRow(A,i+start,&ncols,&cols,&vals));
      if (a!=1.0) {
        svals=buf;
        for (j=0;j<ncols;j++) svals[j] = vals[j]*a;
      } else svals=(PetscScalar*)vals;
      for (j=0;j<ncols;j++) scols[j] = map1[cols[j]];
      j = gstart+i;
      PetscCall(MatSetValues(G,1,&j,ncols,scols,svals,INSERT_VALUES));
      PetscCall(MatRestoreRow(A,i+start,&ncols,&cols,&vals));
    }
  }
  /* Transfer B */
  if (b!=0.0) {
    PetscCall(MatGetOwnershipRange(B,&start,NULL));
    for (i=0;i<m1;i++) {
      PetscCall(MatGetRow(B,i+start,&ncols,&cols,&vals));
      if (b!=1.0) {
        svals=buf;
        for (j=0;j<ncols;j++) svals[j] = vals[j]*b;
      } else svals=(PetscScalar*)vals;
      for (j=0;j<ncols;j++) scols[j] = map2[cols[j]];
      j = gstart+i;
      PetscCall(MatSetValues(G,1,&j,ncols,scols,svals,INSERT_VALUES));
      PetscCall(MatRestoreRow(B,i+start,&ncols,&cols,&vals));
    }
  }
  /* Transfer C */
  if (c!=0.0) {
    PetscCall(MatGetOwnershipRange(C,&start,NULL));
    for (i=0;i<m2;i++) {
      PetscCall(MatGetRow(C,i+start,&ncols,&cols,&vals));
      if (c!=1.0) {
        svals=buf;
        for (j=0;j<ncols;j++) svals[j] = vals[j]*c;
      } else svals=(PetscScalar*)vals;
      for (j=0;j<ncols;j++) scols[j] = map1[cols[j]];
      j = gstart+m1+i;
      PetscCall(MatSetValues(G,1,&j,ncols,scols,svals,INSERT_VALUES));
      PetscCall(MatRestoreRow(C,i+start,&ncols,&cols,&vals));
    }
  }
  /* Transfer D */
  if (d!=0.0) {
    PetscCall(MatGetOwnershipRange(D,&start,NULL));
    for (i=0;i<m2;i++) {
      PetscCall(MatGetRow(D,i+start,&ncols,&cols,&vals));
      if (d!=1.0) {
        svals=buf;
        for (j=0;j<ncols;j++) svals[j] = vals[j]*d;
      } else svals=(PetscScalar*)vals;
      for (j=0;j<ncols;j++) scols[j] = map2[cols[j]];
      j = gstart+m1+i;
      PetscCall(MatSetValues(G,1,&j,ncols,scols,svals,INSERT_VALUES));
      PetscCall(MatRestoreRow(D,i+start,&ncols,&cols,&vals));
    }
  }
  PetscCall(PetscFree4(buf,scols,map1,map2));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   MatCreateTile - Explicitly build a matrix from four blocks, G = [ a*A b*B; c*C d*D ].

   Collective

   Input Parameters:
+  a - scaling factor for block A
.  A - matrix for top-left block
.  b - scaling factor for block B
.  B - matrix for top-right block
.  c - scaling factor for block C
.  C - matrix for bottom-left block
.  d - scaling factor for block D
-  D - matrix for bottom-right block

   Output Parameter:
.  G  - the resulting matrix

   Notes:
   In the case of a parallel matrix, a permuted version of G is returned. The permutation
   is a perfect shuffle such that the local parts of A, B, C, D remain in the local part of
   G for the same process.

   Matrix G must be destroyed by the user.

   The blocks can be of different type. They can be either ConstantDiagonal, or a standard
   type such as AIJ, or any other type provided that it supports the MatGetRow operation.
   The type of the output matrix will be the same as the first block that is not
   ConstantDiagonal (checked in the A,B,C,D order).

   Level: developer

.seealso: MatCreateNest()
@*/
PetscErrorCode MatCreateTile(PetscScalar a,Mat A,PetscScalar b,Mat B,PetscScalar c,Mat C,PetscScalar d,Mat D,Mat *G)
{
  PetscInt       i,k,M1,M2,N1,N2,M,N,m1,m2,n1,n2,m,n,bs;
  PetscBool      diag[4];
  Mat            block[4] = {A,B,C,D};
  MatType        type[4];
  PetscMPIInt    size;

  PetscFunctionBegin;
  PetscCheckSameTypeAndComm(A,2,B,4);
  PetscCheckSameTypeAndComm(A,2,C,6);
  PetscCheckSameTypeAndComm(A,2,D,8);

  /* check row 1 */
  PetscCall(MatGetSize(A,&M1,NULL));
  PetscCall(MatGetLocalSize(A,&m1,NULL));
  PetscCall(MatGetSize(B,&M,NULL));
  PetscCall(MatGetLocalSize(B,&m,NULL));
  PetscCheck(M==M1 && m==m1,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Incompatible dimensions");
  /* check row 2 */
  PetscCall(MatGetSize(C,&M2,NULL));
  PetscCall(MatGetLocalSize(C,&m2,NULL));
  PetscCall(MatGetSize(D,&M,NULL));
  PetscCall(MatGetLocalSize(D,&m,NULL));
  PetscCheck(M==M2 && m==m2,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Incompatible dimensions");
  /* check column 1 */
  PetscCall(MatGetSize(A,NULL,&N1));
  PetscCall(MatGetLocalSize(A,NULL,&n1));
  PetscCall(MatGetSize(C,NULL,&N));
  PetscCall(MatGetLocalSize(C,NULL,&n));
  PetscCheck(N==N1 && n==n1,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Incompatible dimensions");
  /* check column 2 */
  PetscCall(MatGetSize(B,NULL,&N2));
  PetscCall(MatGetLocalSize(B,NULL,&n2));
  PetscCall(MatGetSize(D,NULL,&N));
  PetscCall(MatGetLocalSize(D,NULL,&n));
  PetscCheck(N==N2 && n==n2,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Incompatible dimensions");

  /* check matrix types */
  for (i=0;i<4;i++) {
    PetscCall(MatGetType(block[i],&type[i]));
    PetscCall(PetscStrcmp(type[i],MATCONSTANTDIAGONAL,&diag[i]));
  }
  for (k=0;k<4;k++) if (!diag[k]) break;
  PetscCheck(k<4,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Not implemented for 4 diagonal blocks");

  PetscCall(MatGetBlockSize(block[k],&bs));
  PetscCall(MatCreate(PetscObjectComm((PetscObject)block[k]),G));
  PetscCall(MatSetSizes(*G,m1+m2,n1+n2,M1+M2,N1+N2));
  PetscCall(MatSetType(*G,type[k]));
  PetscCall(MatSetBlockSize(*G,bs));
  PetscCall(MatSetUp(*G));

  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)*G),&size));
  if (size>1) PetscCall(MatCreateTile_MPI(a,A,b,B,c,C,d,D,*G));
  else PetscCall(MatCreateTile_Seq(a,A,b,B,c,C,d,D,*G));
  PetscCall(MatAssemblyBegin(*G,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*G,MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}
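/*
   Example usage (an illustrative sketch, not part of the library source): build
   the 2x2 block matrix G = [ K  M; -M  K ] from two square matrices K and M of
   the same size and parallel layout. K, M and the omitted setup/error handling
   are placeholders.

     Mat K,M,G;
     ...
     PetscCall(MatCreateTile(1.0,K,1.0,M,-1.0,M,1.0,K,&G));
     ...
     PetscCall(MatDestroy(&G));
*/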

/*@C
   MatCreateVecsEmpty - Get vector(s) compatible with the matrix, i.e. with the same
   parallel layout, but without internal array.

   Collective

   Input Parameter:
.  mat - the matrix

   Output Parameters:
+  right - (optional) vector that the matrix can be multiplied against
-  left - (optional) vector that the matrix vector product can be stored in

   Note:
   This is similar to MatCreateVecs(), but the new vectors do not have an internal
   array, so the intended usage is with VecPlaceArray().

   Level: developer

.seealso: VecDuplicateEmpty()
@*/
PetscErrorCode MatCreateVecsEmpty(Mat mat,Vec *right,Vec *left)
{
  PetscBool      standard,cuda=PETSC_FALSE,skip=PETSC_FALSE;
  PetscInt       M,N,mloc,nloc,rbs,cbs;
  PetscMPIInt    size;
  Vec            v;

  PetscFunctionBegin;

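  /* For matrix types other than the standard (CUDA) AIJ/BAIJ/SBAIJ/dense ones,
     fall back to MatCreateVecs() and inspect the type of the resulting vectors;
     they are kept only if they are not standard or CUDA vectors, otherwise they
     are destroyed and recreated below without an internal array */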
  PetscCall(PetscObjectTypeCompareAny((PetscObject)mat,&standard,MATSEQAIJ,MATMPIAIJ,MATSEQBAIJ,MATMPIBAIJ,MATSEQSBAIJ,MATMPISBAIJ,MATSEQDENSE,MATMPIDENSE,""));
  PetscCall(PetscObjectTypeCompareAny((PetscObject)mat,&cuda,MATSEQAIJCUSPARSE,MATMPIAIJCUSPARSE,""));
  if (!standard && !cuda) {
    PetscCall(MatCreateVecs(mat,right,left));
    v = right? *right: *left;
    if (v) {
      PetscCall(PetscObjectTypeCompareAny((PetscObject)v,&standard,VECSEQ,VECMPI,""));
      PetscCall(PetscObjectTypeCompareAny((PetscObject)v,&cuda,VECSEQCUDA,VECMPICUDA,""));
    }
    if (!standard && !cuda) skip = PETSC_TRUE;
    else {
      if (right) PetscCall(VecDestroy(right));
      if (left) PetscCall(VecDestroy(left));
    }
  }
  if (!skip) {
    PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size));
    PetscCall(MatGetLocalSize(mat,&mloc,&nloc));
    PetscCall(MatGetSize(mat,&M,&N));
    PetscCall(MatGetBlockSizes(mat,&rbs,&cbs));
    if (right) {
      if (cuda) {
#if defined(PETSC_HAVE_CUDA)
        if (size>1) PetscCall(VecCreateMPICUDAWithArray(PetscObjectComm((PetscObject)mat),cbs,nloc,N,NULL,right));
        else PetscCall(VecCreateSeqCUDAWithArray(PetscObjectComm((PetscObject)mat),cbs,N,NULL,right));
#endif
      } else {
        if (size>1) PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)mat),cbs,nloc,N,NULL,right));
        else PetscCall(VecCreateSeqWithArray(PetscObjectComm((PetscObject)mat),cbs,N,NULL,right));
      }
    }
    if (left) {
      if (cuda) {
#if defined(PETSC_HAVE_CUDA)
        if (size>1) PetscCall(VecCreateMPICUDAWithArray(PetscObjectComm((PetscObject)mat),rbs,mloc,M,NULL,left));
        else PetscCall(VecCreateSeqCUDAWithArray(PetscObjectComm((PetscObject)mat),rbs,M,NULL,left));
#endif
      } else {
        if (size>1) PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)mat),rbs,mloc,M,NULL,left));
        else PetscCall(VecCreateSeqWithArray(PetscObjectComm((PetscObject)mat),rbs,M,NULL,left));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
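/*
   Example usage (an illustrative sketch with placeholder names): wrap a
   user-owned buffer x, sized to the local column dimension of A, as the input
   of a matrix-vector product without allocating any vector storage; y is a
   regular output vector created with MatCreateVecs().

     Vec         v,y;
     PetscScalar *x;
     ...
     PetscCall(MatCreateVecs(A,NULL,&y));
     PetscCall(MatCreateVecsEmpty(A,&v,NULL));
     PetscCall(VecPlaceArray(v,x));
     PetscCall(MatMult(A,v,y));
     PetscCall(VecResetArray(v));
     PetscCall(VecDestroy(&v));
     PetscCall(VecDestroy(&y));
*/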

/*@C
   MatNormEstimate - Estimate the 2-norm of a matrix.

   Collective

   Input Parameters:
+  A   - the matrix
.  vrn - random vector with normally distributed entries (can be NULL)
-  w   - workspace vector (can be NULL)

   Output Parameter:
.  nrm - the norm estimate

   Notes:
   Does not need access to the matrix entries, just performs a matrix-vector product.
   Based on work by I. Ipsen and coworkers https://ipsen.math.ncsu.edu/ps/slides_ima.pdf

   The input vector vrn must have unit 2-norm.
   If vrn is NULL, then it is created internally and filled with VecSetRandomNormal().

   Level: developer

.seealso: VecSetRandomNormal()
@*/
PetscErrorCode MatNormEstimate(Mat A,Vec vrn,Vec w,PetscReal *nrm)
{
  PetscInt       n;
  Vec            vv=NULL,ww=NULL;

  PetscFunctionBegin;

  if (!vrn) {
    PetscCall(MatCreateVecs(A,&vv,NULL));
    vrn = vv;
    PetscCall(VecSetRandomNormal(vv,NULL,NULL,NULL));
    PetscCall(VecNormalize(vv,NULL));
  }
  if (!w) {
    PetscCall(MatCreateVecs(A,&ww,NULL));
    w = ww;
  }

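  /* The estimate is sqrt(n)*||A*vrn||_2, where n is the global size of A and
     vrn is a random vector with normally distributed entries and unit 2-norm */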
  PetscCall(MatGetSize(A,&n,NULL));
  PetscCall(MatMult(A,vrn,w));
  PetscCall(VecNorm(w,NORM_2,nrm));
  *nrm *= PetscSqrtReal((PetscReal)n);

  PetscCall(VecDestroy(&vv));
  PetscCall(VecDestroy(&ww));
  PetscFunctionReturn(PETSC_SUCCESS);
}
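/*
   Example usage (an illustrative sketch): one-shot estimate of the 2-norm of A,
   letting the routine create the random and workspace vectors internally.

     PetscReal nrm;
     PetscCall(MatNormEstimate(A,NULL,NULL,&nrm));
     PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Estimated norm: %g\n",(double)nrm));
*/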