Actual source code: ex51.c
slepc-3.19.0 2023-03-31
/*
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
   SLEPc - Scalable Library for Eigenvalue Problem Computations
   Copyright (c) 2002-, Universitat Politecnica de Valencia, Spain

   This file is part of SLEPc.
   SLEPc is distributed under a 2-clause BSD license (see LICENSE).
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
*/

static char help[] = "Computes a partial GSVD of two matrices from IR Tools example.\n"
  "The command line options are:\n"
  "  -n <n>, where <n> = number of grid subdivisions in x and y dimensions.\n\n";

#include <slepcsvd.h>

/* LookUp: returns an index i such that X(i) <= y < X(i+1), where X = linspace(0,1,N).
   Only elements start..end-1 are considered */
PetscErrorCode LookUp(PetscInt N,PetscInt start,PetscInt end,PetscReal y,PetscInt *i)
{
  PetscInt  n=end-start,j=n/2;
  PetscReal h=1.0/(N-1);

  PetscFunctionBeginUser;
  if (y<(start+j)*h) PetscCall(LookUp(N,start,start+j,y,i));
  else if (y<(start+j+1)*h) *i = start+j;
  else PetscCall(LookUp(N,start+j,end,y,i));
  PetscFunctionReturn(PETSC_SUCCESS);
}
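
/* Example: with N=5 the grid is X = [0, 0.25, 0.5, 0.75, 1], so LookUp(5,0,5,0.6,&i)
   returns i=2, since X(2)=0.5 <= 0.6 < X(3)=0.75 */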

/*
   PermuteMatrices - Symmetric permutation of A using MatPartitioning, then permute
   columns of B in the same way.
*/
PetscErrorCode PermuteMatrices(Mat *A,Mat *B)
{
  MatPartitioning part;
  IS              isn,is,id;
  PetscInt        *nlocal,Istart,Iend;
  PetscMPIInt     size,rank;
  MPI_Comm        comm;
  Mat             in=*A,out;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)in,&comm));
  PetscCallMPI(MPI_Comm_size(comm,&size));
  PetscCallMPI(MPI_Comm_rank(comm,&rank));
  PetscCall(MatPartitioningCreate(comm,&part));
  PetscCall(MatPartitioningSetAdjacency(part,in));
  PetscCall(MatPartitioningSetFromOptions(part));
  PetscCall(MatPartitioningApply(part,&is));       /* get owner of each vertex */
  PetscCall(ISPartitioningToNumbering(is,&isn));   /* get new global number of each vertex */
  PetscCall(PetscMalloc1(size,&nlocal));
  PetscCall(ISPartitioningCount(is,size,nlocal));  /* count vertices assigned to each process */
  PetscCall(ISDestroy(&is));

  /* get old global number of each new global number */
  PetscCall(ISInvertPermutation(isn,nlocal[rank],&is));
  PetscCall(PetscFree(nlocal));
  PetscCall(ISDestroy(&isn));
  PetscCall(MatPartitioningDestroy(&part));

  /* symmetric permutation of A */
  PetscCall(MatCreateSubMatrix(in,is,is,MAT_INITIAL_MATRIX,&out));
  PetscCall(MatDestroy(A));
  *A = out;

  /* column permutation of B */
  PetscCall(MatGetOwnershipRange(*B,&Istart,&Iend));
  PetscCall(ISCreateStride(comm,Iend-Istart,Istart,1,&id));
  PetscCall(ISSetIdentity(id));
  PetscCall(MatCreateSubMatrix(*B,id,is,MAT_INITIAL_MATRIX,&out));
  PetscCall(MatDestroy(B));
  *B = out;
  PetscCall(ISDestroy(&id));
  PetscCall(ISDestroy(&is));
  PetscFunctionReturn(PETSC_SUCCESS);
}
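
/* Note: the permutation only changes the parallel data layout, not the spectrum; both
   A^T*A and B^T*B undergo the same congruence by the permutation matrix, so the
   generalized singular values of (A,B) are unchanged */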

int main(int argc,char **argv)
{
  Mat         A,B;             /* operator matrices */
  SVD         svd;             /* singular value problem solver context */
  KSP         ksp;
  PetscInt    n=32,N,i,i2,j,k,xidx,yidx,bl,Istart,Iend,col[3];
  PetscScalar vals[] = { 1, -2, 1 },X,Y;
  PetscBool   flg,terse,permute=PETSC_FALSE;
  PetscRandom rctx;

  PetscFunctionBeginUser;
  PetscCall(SlepcInitialize(&argc,&argv,(char*)0,help));

  PetscCall(PetscOptionsGetInt(NULL,NULL,"-n",&n,&flg));
  N = n*n;
  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"\nGSVD of inverse interpolation problem, (%" PetscInt_FMT "+%" PetscInt_FMT ")x%" PetscInt_FMT "\n\n",N,2*N,N));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                          Build the matrices
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(PetscRandomCreate(PETSC_COMM_WORLD,&rctx));
  PetscCall(PetscRandomSetInterval(rctx,0,1));
  PetscCall(PetscRandomSetFromOptions(rctx));

  PetscCall(MatCreate(PETSC_COMM_WORLD,&A));
  PetscCall(MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,N,N));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
  PetscCall(MatGetOwnershipRange(A,&Istart,&Iend));

  /* make sure that the matrix is the same irrespective of the number of MPI processes */
  PetscCall(PetscRandomSetSeed(rctx,0x12345678));
  PetscCall(PetscRandomSeed(rctx));
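  /* discard the values already consumed for rows owned by lower-rank processes
     (two random numbers, X and Y, are drawn per row) */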
  for (k=0;k<Istart;k++) {
    PetscCall(PetscRandomGetValue(rctx,&X));
    PetscCall(PetscRandomGetValue(rctx,&Y));
  }
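
  /* each local row of A corresponds to a random point: its nonzeros are the bilinear
     interpolation weights (1-X)*(1-Y), X*(1-Y), (1-X)*Y, X*Y at the four corners of the
     grid cell (xidx,yidx) containing the point, with (X,Y) the local coordinates within
     that cell */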
  for (k=0;k<Iend-Istart;k++) {
    PetscCall(PetscRandomGetValue(rctx,&X));
    PetscCall(LookUp(n,0,n,PetscRealPart(X),&xidx));
    X = X*(n-1)-xidx;   /* scale value to a 1-spaced grid */
    PetscCall(PetscRandomGetValue(rctx,&Y));
    PetscCall(LookUp(n,0,n,PetscRealPart(Y),&yidx));
    Y = Y*(n-1)-yidx;   /* scale value to a 1-spaced grid */
    for (j=0;j<n;j++) {
      for (i=0;i<n;i++) {
        if (i<n-1 && j<n-1 && xidx==j && yidx==i) PetscCall(MatSetValue(A,Istart+k,i+j*n,1.0-X-Y+X*Y,ADD_VALUES));
        if (i<n-1 && j>0 && xidx==j-1 && yidx==i) PetscCall(MatSetValue(A,Istart+k,i+j*n,X-X*Y,ADD_VALUES));
        if (i>0 && j<n-1 && xidx==j && yidx==i-1) PetscCall(MatSetValue(A,Istart+k,i+j*n,Y-X*Y,ADD_VALUES));
        if (i>0 && j>0 && xidx==j-1 && yidx==i-1) PetscCall(MatSetValue(A,Istart+k,i+j*n,X*Y,ADD_VALUES));
      }
    }
  }
  PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));
  PetscCall(PetscRandomDestroy(&rctx));

  PetscCall(MatCreate(PETSC_COMM_WORLD,&B));
  PetscCall(MatSetSizes(B,PETSC_DECIDE,PETSC_DECIDE,2*N,N));
  PetscCall(MatSetFromOptions(B));
  PetscCall(MatSetUp(B));
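
  /* B is a 2N x N matrix collecting two difference operators, kron(speye(n),T1) in one
     block of rows and kron(T2,speye(n)) in the other (T1, T2 tridiagonal); in this kind
     of inverse problem B typically plays the role of a regularization operator */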
  for (i=Istart;i<Iend;i++) {
    /* upper block: kron(speye(n),T1) where T1 is tridiagonal */
    i2 = i+Istart;
    if (i%n==0) PetscCall(MatSetValue(B,i2,i,1.0,INSERT_VALUES));
    else if (i%n==n-1) {
      PetscCall(MatSetValue(B,i2,i-1,-1.0,INSERT_VALUES));
      PetscCall(MatSetValue(B,i2,i,1.0,INSERT_VALUES));
    } else {
      col[0]=i-1; col[1]=i; col[2]=i+1;
      PetscCall(MatSetValues(B,1,&i2,3,col,vals,INSERT_VALUES));
    }
    /* lower block: kron(T2,speye(n)) where T2 is tridiagonal */
    i2 = i+Iend;
    bl = i/n;     /* index of block */
    j = i-bl*n;   /* index within block */
    if (bl==0 || bl==n-1) PetscCall(MatSetValue(B,i2,i,1.0,INSERT_VALUES));
    else {
      col[0]=i-n; col[1]=i; col[2]=i+n;
      PetscCall(MatSetValues(B,1,&i2,3,col,vals,INSERT_VALUES));
    }
  }
  PetscCall(MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY));

  PetscCall(PetscOptionsGetBool(NULL,NULL,"-permute",&permute,NULL));
  if (permute) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD," Permuting to optimize parallel matvec\n"));
    PetscCall(PermuteMatrices(&A,&B));
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
          Create the singular value solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(SVDCreate(PETSC_COMM_WORLD,&svd));
  PetscCall(SVDSetOperators(svd,A,B));
  PetscCall(SVDSetProblemType(svd,SVD_GENERALIZED));
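
  /* solver settings: thick-restart Lanczos computing 6 generalized singular values; the
     stacked matrix Z=[A;B] is built explicitly for the inner solves, a scale factor is
     set with SVDTRLanczosSetScale (see its manual page for the meaning of negative
     values), and the tolerance of the inner KSP solver is tightened */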
  PetscCall(SVDSetType(svd,SVDTRLANCZOS));
  PetscCall(SVDSetDimensions(svd,6,PETSC_DEFAULT,PETSC_DEFAULT));
  PetscCall(SVDTRLanczosSetExplicitMatrix(svd,PETSC_TRUE));
  PetscCall(SVDTRLanczosSetScale(svd,-10));
  PetscCall(SVDTRLanczosGetKSP(svd,&ksp));
  PetscCall(KSPSetTolerances(ksp,1e-12,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT));

  PetscCall(SVDSetFromOptions(svd));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                  Solve the problem and print solution
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(SVDSolve(svd));

  /* show detailed info unless -terse option is given by user */
  PetscCall(PetscOptionsHasName(NULL,NULL,"-terse",&terse));
  if (terse) PetscCall(SVDErrorView(svd,SVD_ERROR_NORM,NULL));
  else {
    PetscCall(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO_DETAIL));
    PetscCall(SVDConvergedReasonView(svd,PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(SVDErrorView(svd,SVD_ERROR_NORM,PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD));
  }
  PetscCall(SVDDestroy(&svd));
  PetscCall(MatDestroy(&A));
  PetscCall(MatDestroy(&B));
  PetscCall(SlepcFinalize());
  return 0;
}

/*TEST

   testset:
      args: -n 16 -terse
      requires: double
      output_file: output/ex51_1.out
      test:
         args: -svd_trlanczos_gbidiag {{upper lower}} -svd_trlanczos_oneside
         suffix: 1
      test:
         suffix: 2
         nsize: 2
         args: -permute
         filter: grep -v Permuting

TEST*/
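
/* Example run (the makefile target and launcher below follow the usual PETSc/SLEPc
   example setup and are not part of the test harness above; adjust to your installation):

      make ex51
      mpiexec -n 2 ./ex51 -n 16 -terse -permute
*/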