Actual source code: dsbasic.c
slepc-3.19.0 2023-03-31
1: /*
2: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
3: SLEPc - Scalable Library for Eigenvalue Problem Computations
4: Copyright (c) 2002-, Universitat Politecnica de Valencia, Spain
6: This file is part of SLEPc.
7: SLEPc is distributed under a 2-clause BSD license (see LICENSE).
8: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
9: */
10: /*
11: Basic DS routines
12: */
14: #include <slepc/private/dsimpl.h>
16: PetscFunctionList DSList = NULL;
17: PetscBool DSRegisterAllCalled = PETSC_FALSE;
18: PetscClassId DS_CLASSID = 0;
19: PetscLogEvent DS_Solve = 0,DS_Vectors = 0,DS_Synchronize = 0,DS_Other = 0;
20: static PetscBool DSPackageInitialized = PETSC_FALSE;
22: const char *DSStateTypes[] = {"RAW","INTERMEDIATE","CONDENSED","TRUNCATED","DSStateType","DS_STATE_",NULL};
23: const char *DSParallelTypes[] = {"REDUNDANT","SYNCHRONIZED","DISTRIBUTED","DSParallelType","DS_PARALLEL_",NULL};
24: const char *DSMatName[DS_NUM_MAT] = {"A","B","C","T","D","Q","Z","X","Y","U","V","W","E0","E1","E2","E3","E4","E5","E6","E7","E8","E9"};
25: DSMatType DSMatExtra[DS_NUM_EXTRA] = {DS_MAT_E0,DS_MAT_E1,DS_MAT_E2,DS_MAT_E3,DS_MAT_E4,DS_MAT_E5,DS_MAT_E6,DS_MAT_E7,DS_MAT_E8,DS_MAT_E9};
27: /*@C
28: DSFinalizePackage - This function destroys everything in the SLEPc interface
29: to the DS package. It is called from SlepcFinalize().
31: Level: developer
33: .seealso: SlepcFinalize()
34: @*/
35: PetscErrorCode DSFinalizePackage(void)
36: {
37: PetscFunctionBegin;
38: PetscCall(PetscFunctionListDestroy(&DSList));
39: DSPackageInitialized = PETSC_FALSE;
40: DSRegisterAllCalled = PETSC_FALSE;
41: PetscFunctionReturn(PETSC_SUCCESS);
42: }
44: /*@C
45: DSInitializePackage - This function initializes everything in the DS package.
46: It is called from PetscDLLibraryRegister() when using dynamic libraries, and
47: on the first call to DSCreate() when using static libraries.
49: Level: developer
51: .seealso: SlepcInitialize()
52: @*/
53: PetscErrorCode DSInitializePackage(void)
54: {
55: char logList[256];
56: PetscBool opt,pkg;
57: PetscClassId classids[1];
59: PetscFunctionBegin;
60: if (DSPackageInitialized) PetscFunctionReturn(PETSC_SUCCESS);
61: DSPackageInitialized = PETSC_TRUE;
62: /* Register Classes */
63: PetscCall(PetscClassIdRegister("Direct Solver",&DS_CLASSID));
64: /* Register Constructors */
65: PetscCall(DSRegisterAll());
66: /* Register Events */
67: PetscCall(PetscLogEventRegister("DSSolve",DS_CLASSID,&DS_Solve));
68: PetscCall(PetscLogEventRegister("DSVectors",DS_CLASSID,&DS_Vectors));
69: PetscCall(PetscLogEventRegister("DSSynchronize",DS_CLASSID,&DS_Synchronize));
70: PetscCall(PetscLogEventRegister("DSOther",DS_CLASSID,&DS_Other));
71: /* Process Info */
72: classids[0] = DS_CLASSID;
73: PetscCall(PetscInfoProcessClass("ds",1,&classids[0]));
74: /* Process summary exclusions */
75: PetscCall(PetscOptionsGetString(NULL,NULL,"-log_exclude",logList,sizeof(logList),&opt));
76: if (opt) {
77: PetscCall(PetscStrInList("ds",logList,',',&pkg));
78: if (pkg) PetscCall(PetscLogEventDeactivateClass(DS_CLASSID));
79: }
80: /* Register package finalizer */
81: PetscCall(PetscRegisterFinalize(DSFinalizePackage));
82: PetscFunctionReturn(PETSC_SUCCESS);
83: }
85: /*@
86: DSCreate - Creates a DS context.
88: Collective
90: Input Parameter:
91: . comm - MPI communicator
93: Output Parameter:
94: . newds - location to put the DS context
96: Level: beginner
98: Note:
99: DS objects are not intended for normal users but only for
100: advanced users who, for instance, implement their own solvers.
102: .seealso: DSDestroy(), DS
103: @*/
104: PetscErrorCode DSCreate(MPI_Comm comm,DS *newds)
105: {
106: DS ds;
107: PetscInt i;
109: PetscFunctionBegin;
111: *newds = NULL;
112: PetscCall(DSInitializePackage());
113: PetscCall(SlepcHeaderCreate(ds,DS_CLASSID,"DS","Direct Solver (or Dense System)","DS",comm,DSDestroy,DSView));
115: ds->state = DS_STATE_RAW;
116: ds->method = 0;
117: ds->compact = PETSC_FALSE;
118: ds->refined = PETSC_FALSE;
119: ds->extrarow = PETSC_FALSE;
120: ds->ld = 0;
121: ds->l = 0;
122: ds->n = 0;
123: ds->k = 0;
124: ds->t = 0;
125: ds->bs = 1;
126: ds->sc = NULL;
127: ds->pmode = DS_PARALLEL_REDUNDANT;
129: for (i=0;i<DS_NUM_MAT;i++) ds->omat[i] = NULL;
130: ds->perm = NULL;
131: ds->data = NULL;
132: ds->scset = PETSC_FALSE;
133: ds->work = NULL;
134: ds->rwork = NULL;
135: ds->iwork = NULL;
136: ds->lwork = 0;
137: ds->lrwork = 0;
138: ds->liwork = 0;
140: *newds = ds;
141: PetscFunctionReturn(PETSC_SUCCESS);
142: }
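/*
   Illustrative usage sketch, not part of the SLEPc sources: create a DS for a
   standard Hermitian eigenproblem, fill a small diagonal matrix with arbitrary
   sample values, solve it and clean up. It assumes SlepcInitialize() has already
   been called; the dimension n=10 and the entries are illustrative only.
*/
   #include <slepcds.h>

   static PetscErrorCode SampleDSUsage(MPI_Comm comm)
   {
     DS          ds;
     PetscScalar *A,eigr[10],eigi[10];
     PetscInt    i,n=10,ld=10;

     PetscFunctionBegin;
     PetscCall(DSCreate(comm,&ds));
     PetscCall(DSSetType(ds,DSHEP));
     PetscCall(DSAllocate(ds,ld));
     PetscCall(DSSetDimensions(ds,n,0,0));
     PetscCall(DSGetArray(ds,DS_MAT_A,&A));
     PetscCall(PetscArrayzero(A,ld*ld));
     for (i=0;i<n;i++) A[i+i*ld] = (PetscScalar)(i+1);  /* sample diagonal entries */
     PetscCall(DSRestoreArray(ds,DS_MAT_A,&A));
     PetscCall(DSSolve(ds,eigr,eigi));
     PetscCall(DSDestroy(&ds));
     PetscFunctionReturn(PETSC_SUCCESS);
   }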
144: /*@C
145: DSSetOptionsPrefix - Sets the prefix used for searching for all
146: DS options in the database.
148: Logically Collective
150: Input Parameters:
151: + ds - the direct solver context
152: - prefix - the prefix string to prepend to all DS option requests
154: Notes:
155: A hyphen (-) must NOT be given at the beginning of the prefix name.
156: The first character of all runtime options is AUTOMATICALLY the
157: hyphen.
159: Level: advanced
161: .seealso: DSAppendOptionsPrefix()
162: @*/
163: PetscErrorCode DSSetOptionsPrefix(DS ds,const char *prefix)
164: {
165: PetscFunctionBegin;
167: PetscCall(PetscObjectSetOptionsPrefix((PetscObject)ds,prefix));
168: PetscFunctionReturn(PETSC_SUCCESS);
169: }
171: /*@C
172: DSAppendOptionsPrefix - Appends to the prefix used for searching for all
173: DS options in the database.
175: Logically Collective
177: Input Parameters:
178: + ds - the direct solver context
179: - prefix - the prefix string to prepend to all DS option requests
181: Notes:
182: A hyphen (-) must NOT be given at the beginning of the prefix name.
183: The first character of all runtime options is AUTOMATICALLY the hyphen.
185: Level: advanced
187: .seealso: DSSetOptionsPrefix()
188: @*/
189: PetscErrorCode DSAppendOptionsPrefix(DS ds,const char *prefix)
190: {
191: PetscFunctionBegin;
193: PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)ds,prefix));
194: PetscFunctionReturn(PETSC_SUCCESS);
195: }
197: /*@C
198: DSGetOptionsPrefix - Gets the prefix used for searching for all
199: DS options in the database.
201: Not Collective
203: Input Parameter:
204: . ds - the direct solver context
206: Output Parameter:
207: . prefix - pointer to the prefix string used by the DS context
209: Note:
210: On the Fortran side, the user should pass in a string 'prefix' of
211: sufficient length to hold the prefix.
213: Level: advanced
215: .seealso: DSSetOptionsPrefix(), DSAppendOptionsPrefix()
216: @*/
217: PetscErrorCode DSGetOptionsPrefix(DS ds,const char *prefix[])
218: {
219: PetscFunctionBegin;
222: PetscCall(PetscObjectGetOptionsPrefix((PetscObject)ds,prefix));
223: PetscFunctionReturn(PETSC_SUCCESS);
224: }
226: /*@C
227: DSSetType - Selects the type for the DS object.
229: Logically Collective
231: Input Parameters:
232: + ds - the direct solver context
233: - type - a known type
235: Level: intermediate
237: .seealso: DSGetType()
238: @*/
239: PetscErrorCode DSSetType(DS ds,DSType type)
240: {
241: PetscErrorCode (*r)(DS);
242: PetscBool match;
244: PetscFunctionBegin;
248: PetscCall(PetscObjectTypeCompare((PetscObject)ds,type,&match));
249: if (match) PetscFunctionReturn(PETSC_SUCCESS);
251: PetscCall(PetscFunctionListFind(DSList,type,&r));
252: PetscCheck(r,PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested DS type %s",type);
254: PetscCall(PetscMemzero(ds->ops,sizeof(struct _DSOps)));
256: PetscCall(PetscObjectChangeTypeName((PetscObject)ds,type));
257: PetscCall((*r)(ds));
258: PetscFunctionReturn(PETSC_SUCCESS);
259: }
261: /*@C
262: DSGetType - Gets the DS type name (as a string) from the DS context.
264: Not Collective
266: Input Parameter:
267: . ds - the direct solver context
269: Output Parameter:
270: . type - name of the direct solver
272: Level: intermediate
274: .seealso: DSSetType()
275: @*/
276: PetscErrorCode DSGetType(DS ds,DSType *type)
277: {
278: PetscFunctionBegin;
281: *type = ((PetscObject)ds)->type_name;
282: PetscFunctionReturn(PETSC_SUCCESS);
283: }
285: /*@
286: DSDuplicate - Creates a new direct solver object with the same options as
287: an existing one.
289: Collective
291: Input Parameter:
292: . ds - direct solver context
294: Output Parameter:
295: . dsnew - location to put the new DS
297: Notes:
298: DSDuplicate() DOES NOT COPY the matrices, and the new DS does not even have
299: internal arrays allocated. DSAllocate() must be called before using the new DS.
301: The sorting criterion options are not copied, see DSSetSlepcSC().
303: Level: intermediate
305: .seealso: DSCreate(), DSAllocate(), DSSetSlepcSC()
306: @*/
307: PetscErrorCode DSDuplicate(DS ds,DS *dsnew)
308: {
309: PetscFunctionBegin;
312: PetscCall(DSCreate(PetscObjectComm((PetscObject)ds),dsnew));
313: if (((PetscObject)ds)->type_name) PetscCall(DSSetType(*dsnew,((PetscObject)ds)->type_name));
314: (*dsnew)->method = ds->method;
315: (*dsnew)->compact = ds->compact;
316: (*dsnew)->refined = ds->refined;
317: (*dsnew)->extrarow = ds->extrarow;
318: (*dsnew)->bs = ds->bs;
319: (*dsnew)->pmode = ds->pmode;
320: PetscFunctionReturn(PETSC_SUCCESS);
321: }
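/*
   Illustrative sketch, not part of the SLEPc sources: clone the settings of an
   existing DS and allocate storage for the copy, since DSDuplicate() does not
   allocate the internal arrays. Assumes ds is an already configured and
   allocated DS.
*/
   DS       dsnew;
   PetscInt ld;

   PetscCall(DSGetLeadingDimension(ds,&ld));
   PetscCall(DSDuplicate(ds,&dsnew));
   PetscCall(DSAllocate(dsnew,ld));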
323: /*@
324: DSSetMethod - Selects the method to be used to solve the problem.
326: Logically Collective
328: Input Parameters:
329: + ds - the direct solver context
330: - meth - an index identifying the method
332: Options Database Key:
333: . -ds_method <meth> - Sets the method
335: Level: intermediate
337: .seealso: DSGetMethod()
338: @*/
339: PetscErrorCode DSSetMethod(DS ds,PetscInt meth)
340: {
341: PetscFunctionBegin;
344: PetscCheck(meth>=0,PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"The method must be a non-negative integer");
345: PetscCheck(meth<=DS_MAX_SOLVE,PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"Too large value for the method");
346: ds->method = meth;
347: PetscFunctionReturn(PETSC_SUCCESS);
348: }
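/*
   Illustrative sketch, not part of the SLEPc sources: select the solver variant
   with index 1, assuming the chosen DS type implements more than one method
   (otherwise the default, index 0, is used). Equivalent to the runtime option
   -ds_method 1.
*/
   PetscCall(DSSetMethod(ds,1));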
350: /*@
351: DSGetMethod - Gets the method currently used in the DS.
353: Not Collective
355: Input Parameter:
356: . ds - the direct solver context
358: Output Parameter:
359: . meth - identifier of the method
361: Level: intermediate
363: .seealso: DSSetMethod()
364: @*/
365: PetscErrorCode DSGetMethod(DS ds,PetscInt *meth)
366: {
367: PetscFunctionBegin;
370: *meth = ds->method;
371: PetscFunctionReturn(PETSC_SUCCESS);
372: }
374: /*@
375: DSSetParallel - Selects the mode of operation in parallel runs.
377: Logically Collective
379: Input Parameters:
380: + ds - the direct solver context
381: - pmode - the parallel mode
383: Options Database Key:
384: . -ds_parallel <mode> - Sets the parallel mode, 'redundant', 'synchronized'
385: or 'distributed'
387: Notes:
388: In the 'redundant' parallel mode, all processes perform the computation
389: redundantly, starting from the same data and producing the same result.
390: The result may differ slightly across processes when a multithreaded BLAS
391: library is used, which may cause issues in ill-conditioned problems.
393: In the 'synchronized' parallel mode, only the first MPI process performs the
394: computation and then the computed quantities are broadcast to the other
395: processes in the communicator. This communication is not done automatically,
396: an explicit call to DSSynchronize() is required.
398: The 'distributed' parallel mode can be used in some DS types only, such
399: as the contour integral method of DSNEP. In this case, every MPI process
400: will be in charge of part of the computation.
402: Level: advanced
404: .seealso: DSSynchronize(), DSGetParallel()
405: @*/
406: PetscErrorCode DSSetParallel(DS ds,DSParallelType pmode)
407: {
408: PetscFunctionBegin;
411: ds->pmode = pmode;
412: PetscFunctionReturn(PETSC_SUCCESS);
413: }
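/*
   Illustrative sketch, not part of the SLEPc sources: synchronized mode, where
   the solve is carried out by the first process only and the results are then
   broadcast explicitly with DSSynchronize(). Assumes ds is an already configured
   DS and eigr,eigi are arrays of length at least n.
*/
   PetscCall(DSSetParallel(ds,DS_PARALLEL_SYNCHRONIZED));
   /* ... set dimensions and fill the matrices ... */
   PetscCall(DSSolve(ds,eigr,eigi));
   PetscCall(DSSynchronize(ds,eigr,eigi));  /* broadcast eigenvalues and updated matrices */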
415: /*@
416: DSGetParallel - Gets the mode of operation in parallel runs.
418: Not Collective
420: Input Parameter:
421: . ds - the direct solver context
423: Output Parameter:
424: . pmode - the parallel mode
426: Level: advanced
428: .seealso: DSSetParallel()
429: @*/
430: PetscErrorCode DSGetParallel(DS ds,DSParallelType *pmode)
431: {
432: PetscFunctionBegin;
435: *pmode = ds->pmode;
436: PetscFunctionReturn(PETSC_SUCCESS);
437: }
439: /*@
440: DSSetCompact - Switch to compact storage of matrices.
442: Logically Collective
444: Input Parameters:
445: + ds - the direct solver context
446: - comp - a boolean flag
448: Notes:
449: Compact storage is used in some DS types such as DSHEP when the matrix
450: is tridiagonal. This flag can be used to indicate whether the user
451: provides the matrix entries via the compact form (the tridiagonal DS_MAT_T)
452: or the non-compact one (DS_MAT_A).
454: The default is PETSC_FALSE.
456: Level: advanced
458: .seealso: DSGetCompact()
459: @*/
460: PetscErrorCode DSSetCompact(DS ds,PetscBool comp)
461: {
462: PetscFunctionBegin;
465: ds->compact = comp;
466: PetscFunctionReturn(PETSC_SUCCESS);
467: }
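/*
   Illustrative sketch, not part of the SLEPc sources: provide a tridiagonal
   matrix to DSHEP in compact form through DS_MAT_T instead of filling the dense
   DS_MAT_A. Assumes ds is a DSHEP already allocated with DSAllocate(); the exact
   layout of the diagonal and off-diagonal entries in DS_MAT_T is described in
   the DSHEP documentation.
*/
   PetscReal *T;

   PetscCall(DSSetCompact(ds,PETSC_TRUE));
   PetscCall(DSGetArrayReal(ds,DS_MAT_T,&T));
   /* fill the diagonal and off-diagonal entries of T here */
   PetscCall(DSRestoreArrayReal(ds,DS_MAT_T,&T));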
469: /*@
470: DSGetCompact - Gets the compact storage flag.
472: Not Collective
474: Input Parameter:
475: . ds - the direct solver context
477: Output Parameter:
478: . comp - the flag
480: Level: advanced
482: .seealso: DSSetCompact()
483: @*/
484: PetscErrorCode DSGetCompact(DS ds,PetscBool *comp)
485: {
486: PetscFunctionBegin;
489: *comp = ds->compact;
490: PetscFunctionReturn(PETSC_SUCCESS);
491: }
493: /*@
494: DSSetExtraRow - Sets a flag to indicate that the matrix has one extra
495: row.
497: Logically Collective
499: Input Parameters:
500: + ds - the direct solver context
501: - ext - a boolean flag
503: Notes:
504: In Krylov methods it is useful that the matrix representing the direct solver
505: has one extra row, i.e., has dimension (n+1) x n. If this flag is activated, all
506: transformations applied to the right of the matrix also affect this additional
507: row. In that case, (n+1) must be less than or equal to the leading dimension.
509: The default is PETSC_FALSE.
511: Level: advanced
513: .seealso: DSSolve(), DSAllocate(), DSGetExtraRow()
514: @*/
515: PetscErrorCode DSSetExtraRow(DS ds,PetscBool ext)
516: {
517: PetscFunctionBegin;
520: PetscCheck(!ext || ds->n==0 || ds->n!=ds->ld,PetscObjectComm((PetscObject)ds),PETSC_ERR_ORDER,"Cannot set extra row after setting n=ld");
521: ds->extrarow = ext;
522: PetscFunctionReturn(PETSC_SUCCESS);
523: }
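/*
   Illustrative sketch, not part of the SLEPc sources: reserve room for the extra
   row (the leading dimension must be at least n+1) and activate the flag before
   setting the dimensions, as required by the check above. Assumes ds has been
   created and given a type.
*/
   PetscCall(DSAllocate(ds,n+1));
   PetscCall(DSSetExtraRow(ds,PETSC_TRUE));
   PetscCall(DSSetDimensions(ds,n,0,0));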
525: /*@
526: DSGetExtraRow - Gets the extra row flag.
528: Not Collective
530: Input Parameter:
531: . ds - the direct solver context
533: Output Parameter:
534: . ext - the flag
536: Level: advanced
538: .seealso: DSSetExtraRow()
539: @*/
540: PetscErrorCode DSGetExtraRow(DS ds,PetscBool *ext)
541: {
542: PetscFunctionBegin;
545: *ext = ds->extrarow;
546: PetscFunctionReturn(PETSC_SUCCESS);
547: }
549: /*@
550: DSSetRefined - Sets a flag to indicate that refined vectors must be
551: computed.
553: Logically Collective
555: Input Parameters:
556: + ds - the direct solver context
557: - ref - a boolean flag
559: Notes:
560: Normally the vectors returned in DS_MAT_X are eigenvectors of the
561: projected matrix. With this flag activated, DSVectors() will return
562: the right singular vector associated with the smallest singular value of
563: the matrix \tilde{A}-theta*I, where \tilde{A} is the extended (n+1)xn matrix
564: and theta is the Ritz value. This is used in the refined Ritz
565: approximation.
567: The default is PETSC_FALSE.
569: Level: advanced
571: .seealso: DSVectors(), DSGetRefined()
572: @*/
573: PetscErrorCode DSSetRefined(DS ds,PetscBool ref)
574: {
575: PetscFunctionBegin;
578: ds->refined = ref;
579: PetscFunctionReturn(PETSC_SUCCESS);
580: }
582: /*@
583: DSGetRefined - Gets the refined vectors flag.
585: Not Collective
587: Input Parameter:
588: . ds - the direct solver context
590: Output Parameter:
591: . ref - the flag
593: Level: advanced
595: .seealso: DSSetRefined()
596: @*/
597: PetscErrorCode DSGetRefined(DS ds,PetscBool *ref)
598: {
599: PetscFunctionBegin;
602: *ref = ds->refined;
603: PetscFunctionReturn(PETSC_SUCCESS);
604: }
606: /*@
607: DSSetBlockSize - Sets the block size.
609: Logically Collective
611: Input Parameters:
612: + ds - the direct solver context
613: - bs - the block size
615: Options Database Key:
616: . -ds_block_size <bs> - Sets the block size
618: Level: intermediate
620: .seealso: DSGetBlockSize()
621: @*/
622: PetscErrorCode DSSetBlockSize(DS ds,PetscInt bs)
623: {
624: PetscFunctionBegin;
627: PetscCheck(bs>0,PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"The block size must be at least one");
628: ds->bs = bs;
629: PetscFunctionReturn(PETSC_SUCCESS);
630: }
632: /*@
633: DSGetBlockSize - Gets the block size.
635: Not Collective
637: Input Parameter:
638: . ds - the direct solver context
640: Output Parameter:
641: . bs - block size
643: Level: intermediate
645: .seealso: DSSetBlockSize()
646: @*/
647: PetscErrorCode DSGetBlockSize(DS ds,PetscInt *bs)
648: {
649: PetscFunctionBegin;
652: *bs = ds->bs;
653: PetscFunctionReturn(PETSC_SUCCESS);
654: }
656: /*@C
657: DSSetSlepcSC - Sets the sorting criterion context.
659: Logically Collective
661: Input Parameters:
662: + ds - the direct solver context
663: - sc - a pointer to the sorting criterion context
665: Level: developer
667: .seealso: DSGetSlepcSC(), DSSort()
668: @*/
669: PetscErrorCode DSSetSlepcSC(DS ds,SlepcSC sc)
670: {
671: PetscFunctionBegin;
674: if (ds->sc && !ds->scset) PetscCall(PetscFree(ds->sc));
675: ds->sc = sc;
676: ds->scset = PETSC_TRUE;
677: PetscFunctionReturn(PETSC_SUCCESS);
678: }
680: /*@C
681: DSGetSlepcSC - Gets the sorting criterion context.
683: Not Collective
685: Input Parameter:
686: . ds - the direct solver context
688: Output Parameter:
689: . sc - a pointer to the sorting criterion context
691: Level: developer
693: .seealso: DSSetSlepcSC(), DSSort()
694: @*/
695: PetscErrorCode DSGetSlepcSC(DS ds,SlepcSC *sc)
696: {
697: PetscFunctionBegin;
700: if (!ds->sc) PetscCall(PetscNew(&ds->sc));
701: *sc = ds->sc;
702: PetscFunctionReturn(PETSC_SUCCESS);
703: }
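/*
   Illustrative sketch, not part of the SLEPc sources: select sorting by largest
   magnitude before calling DSSort(), using one of the comparison routines
   provided by SLEPc. Assumes ds is an already created DS and eigr,eigi hold the
   eigenvalues computed by DSSolve().
*/
   SlepcSC sc;

   PetscCall(DSGetSlepcSC(ds,&sc));
   sc->comparison    = SlepcCompareLargestMagnitude;
   sc->comparisonctx = NULL;
   PetscCall(DSSort(ds,eigr,eigi,NULL,NULL,NULL));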
705: /*@
706: DSSetFromOptions - Sets DS options from the options database.
708: Collective
710: Input Parameter:
711: . ds - the direct solver context
713: Notes:
714: To see all options, run your program with the -help option.
716: Level: beginner
718: .seealso: DSSetOptionsPrefix()
719: @*/
720: PetscErrorCode DSSetFromOptions(DS ds)
721: {
722: PetscInt bs,meth;
723: PetscBool flag;
724: DSParallelType pmode;
726: PetscFunctionBegin;
728: PetscCall(DSRegisterAll());
729: /* Set default type (we do not allow changing it with -ds_type) */
730: if (!((PetscObject)ds)->type_name) PetscCall(DSSetType(ds,DSNHEP));
731: PetscObjectOptionsBegin((PetscObject)ds);
733: PetscCall(PetscOptionsInt("-ds_block_size","Block size for the dense system solver","DSSetBlockSize",ds->bs,&bs,&flag));
734: if (flag) PetscCall(DSSetBlockSize(ds,bs));
736: PetscCall(PetscOptionsInt("-ds_method","Method to be used for the dense system","DSSetMethod",ds->method,&meth,&flag));
737: if (flag) PetscCall(DSSetMethod(ds,meth));
739: PetscCall(PetscOptionsEnum("-ds_parallel","Operation mode in parallel runs","DSSetParallel",DSParallelTypes,(PetscEnum)ds->pmode,(PetscEnum*)&pmode,&flag));
740: if (flag) PetscCall(DSSetParallel(ds,pmode));
742: PetscTryTypeMethod(ds,setfromoptions,PetscOptionsObject);
743: PetscCall(PetscObjectProcessOptionsHandlers((PetscObject)ds,PetscOptionsObject));
744: PetscOptionsEnd();
745: PetscFunctionReturn(PETSC_SUCCESS);
746: }
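/*
   Illustrative sketch, not part of the SLEPc sources: once DSSetFromOptions() is
   called on an already created DS, the options handled in this routine can be
   given at run time, e.g.

     ./myprogram -ds_method 1 -ds_parallel synchronized -ds_block_size 2

   where myprogram is a placeholder for the user's executable.
*/
   PetscCall(DSSetFromOptions(ds));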
748: /*@C
749: DSView - Prints the DS data structure.
751: Collective
753: Input Parameters:
754: + ds - the direct solver context
755: - viewer - optional visualization context
757: Note:
758: The available visualization contexts include
759: + PETSC_VIEWER_STDOUT_SELF - standard output (default)
760: - PETSC_VIEWER_STDOUT_WORLD - synchronized standard
761: output where only the first processor opens
762: the file. All other processors send their
763: data to the first processor to print.
765: The user can open an alternative visualization context with
766: PetscViewerASCIIOpen() - output to a specified file.
768: Level: beginner
770: .seealso: DSViewMat()
771: @*/
772: PetscErrorCode DSView(DS ds,PetscViewer viewer)
773: {
774: PetscBool isascii;
775: PetscViewerFormat format;
776: PetscMPIInt size;
778: PetscFunctionBegin;
780: if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)ds),&viewer));
782: PetscCheckSameComm(ds,1,viewer,2);
783: PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii));
784: if (isascii) {
785: PetscCall(PetscViewerGetFormat(viewer,&format));
786: PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)ds,viewer));
787: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)ds),&size));
788: if (size>1) PetscCall(PetscViewerASCIIPrintf(viewer," parallel operation mode: %s\n",DSParallelTypes[ds->pmode]));
789: if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
790: PetscCall(PetscViewerASCIIPrintf(viewer," current state: %s\n",DSStateTypes[ds->state]));
791: PetscCall(PetscViewerASCIIPrintf(viewer," dimensions: ld=%" PetscInt_FMT ", n=%" PetscInt_FMT ", l=%" PetscInt_FMT ", k=%" PetscInt_FMT,ds->ld,ds->n,ds->l,ds->k));
792: if (ds->state==DS_STATE_TRUNCATED) PetscCall(PetscViewerASCIIPrintf(viewer,", t=%" PetscInt_FMT "\n",ds->t));
793: else PetscCall(PetscViewerASCIIPrintf(viewer,"\n"));
794: PetscCall(PetscViewerASCIIPrintf(viewer," flags:%s%s%s\n",ds->compact?" compact":"",ds->extrarow?" extrarow":"",ds->refined?" refined":""));
795: }
796: PetscCall(PetscViewerASCIIPushTab(viewer));
797: PetscTryTypeMethod(ds,view,viewer);
798: PetscCall(PetscViewerASCIIPopTab(viewer));
799: }
800: PetscFunctionReturn(PETSC_SUCCESS);
801: }
803: /*@C
804: DSViewFromOptions - View the direct solver context from the options database
806: Collective
808: Input Parameters:
809: + ds - the direct solver context
810: . obj - optional object
811: - name - command line option
813: Level: intermediate
815: .seealso: DSView(), DSCreate()
816: @*/
817: PetscErrorCode DSViewFromOptions(DS ds,PetscObject obj,const char name[])
818: {
819: PetscFunctionBegin;
821: PetscCall(PetscObjectViewFromOptions((PetscObject)ds,obj,name));
822: PetscFunctionReturn(PETSC_SUCCESS);
823: }
825: /*@
826: DSAllocate - Allocates memory for internal storage or matrices in DS.
828: Logically Collective
830: Input Parameters:
831: + ds - the direct solver context
832: - ld - leading dimension (maximum allowed dimension for the matrices, including
833: the extra row if present)
835: Note:
836: If the leading dimension is different from a previously set value, then
837: all matrices are destroyed with DSReset().
839: Level: intermediate
841: .seealso: DSGetLeadingDimension(), DSSetDimensions(), DSSetExtraRow(), DSReset()
842: @*/
843: PetscErrorCode DSAllocate(DS ds,PetscInt ld)
844: {
845: PetscFunctionBegin;
849: PetscCheck(ld>0,PetscObjectComm((PetscObject)ds),PETSC_ERR_ARG_OUTOFRANGE,"Leading dimension should be at least one");
850: if (ld!=ds->ld) {
851: PetscCall(PetscInfo(ds,"Allocating memory with leading dimension=%" PetscInt_FMT "\n",ld));
852: PetscCall(DSReset(ds));
853: ds->ld = ld;
854: PetscUseTypeMethod(ds,allocate,ld);
855: }
856: PetscFunctionReturn(PETSC_SUCCESS);
857: }
859: /*@
860: DSReset - Resets the DS context to the initial state.
862: Collective
864: Input Parameter:
865: . ds - the direct solver context
867: Note:
868: All data structures whose size depends on the leading dimension
869: set with DSAllocate() are released.
871: Level: advanced
873: .seealso: DSDestroy(), DSAllocate()
874: @*/
875: PetscErrorCode DSReset(DS ds)
876: {
877: PetscInt i;
879: PetscFunctionBegin;
881: if (!ds) PetscFunctionReturn(PETSC_SUCCESS);
882: ds->state = DS_STATE_RAW;
883: ds->ld = 0;
884: ds->l = 0;
885: ds->n = 0;
886: ds->k = 0;
887: for (i=0;i<DS_NUM_MAT;i++) PetscCall(MatDestroy(&ds->omat[i]));
888: PetscCall(PetscFree(ds->perm));
889: PetscFunctionReturn(PETSC_SUCCESS);
890: }
892: /*@C
893: DSDestroy - Destroys DS context that was created with DSCreate().
895: Collective
897: Input Parameter:
898: . ds - the direct solver context
900: Level: beginner
902: .seealso: DSCreate()
903: @*/
904: PetscErrorCode DSDestroy(DS *ds)
905: {
906: PetscFunctionBegin;
907: if (!*ds) PetscFunctionReturn(PETSC_SUCCESS);
909: if (--((PetscObject)(*ds))->refct > 0) { *ds = NULL; PetscFunctionReturn(PETSC_SUCCESS); }
910: PetscCall(DSReset(*ds));
911: PetscTryTypeMethod(*ds,destroy);
912: PetscCall(PetscFree((*ds)->work));
913: PetscCall(PetscFree((*ds)->rwork));
914: PetscCall(PetscFree((*ds)->iwork));
915: if (!(*ds)->scset) PetscCall(PetscFree((*ds)->sc));
916: PetscCall(PetscHeaderDestroy(ds));
917: PetscFunctionReturn(PETSC_SUCCESS);
918: }
920: /*@C
921: DSRegister - Adds a direct solver to the DS package.
923: Not Collective
925: Input Parameters:
926: + name - name of a new user-defined DS
927: - function - routine to create context
929: Notes:
930: DSRegister() may be called multiple times to add several user-defined
931: direct solvers.
933: Level: advanced
935: .seealso: DSRegisterAll()
936: @*/
937: PetscErrorCode DSRegister(const char *name,PetscErrorCode (*function)(DS))
938: {
939: PetscFunctionBegin;
940: PetscCall(DSInitializePackage());
941: PetscCall(PetscFunctionListAdd(&DSList,name,function));
942: PetscFunctionReturn(PETSC_SUCCESS);
943: }
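/*
   Illustrative sketch, not part of the SLEPc sources: registering a hypothetical
   user-defined type named "mysolver". The creation routine must fill ds->ops
   with the implementation of the type, analogously to the DSCreate_* routines
   declared below.
*/
   static PetscErrorCode DSCreate_MySolver(DS ds)
   {
     PetscFunctionBegin;
     /* set the function pointers in ds->ops here */
     PetscFunctionReturn(PETSC_SUCCESS);
   }

   /* then, typically right after SlepcInitialize(): */
   PetscCall(DSRegister("mysolver",DSCreate_MySolver));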
945: SLEPC_EXTERN PetscErrorCode DSCreate_HEP(DS);
946: SLEPC_EXTERN PetscErrorCode DSCreate_NHEP(DS);
947: SLEPC_EXTERN PetscErrorCode DSCreate_GHEP(DS);
948: SLEPC_EXTERN PetscErrorCode DSCreate_GHIEP(DS);
949: SLEPC_EXTERN PetscErrorCode DSCreate_GNHEP(DS);
950: SLEPC_EXTERN PetscErrorCode DSCreate_NHEPTS(DS);
951: SLEPC_EXTERN PetscErrorCode DSCreate_SVD(DS);
952: SLEPC_EXTERN PetscErrorCode DSCreate_HSVD(DS);
953: SLEPC_EXTERN PetscErrorCode DSCreate_GSVD(DS);
954: SLEPC_EXTERN PetscErrorCode DSCreate_PEP(DS);
955: SLEPC_EXTERN PetscErrorCode DSCreate_NEP(DS);
957: /*@C
958: DSRegisterAll - Registers all of the direct solvers in the DS package.
960: Not Collective
962: Level: advanced
964: .seealso: DSRegister()
965: @*/
966: PetscErrorCode DSRegisterAll(void)
967: {
968: PetscFunctionBegin;
969: if (DSRegisterAllCalled) PetscFunctionReturn(PETSC_SUCCESS);
970: DSRegisterAllCalled = PETSC_TRUE;
971: PetscCall(DSRegister(DSHEP,DSCreate_HEP));
972: PetscCall(DSRegister(DSNHEP,DSCreate_NHEP));
973: PetscCall(DSRegister(DSGHEP,DSCreate_GHEP));
974: PetscCall(DSRegister(DSGHIEP,DSCreate_GHIEP));
975: PetscCall(DSRegister(DSGNHEP,DSCreate_GNHEP));
976: PetscCall(DSRegister(DSNHEPTS,DSCreate_NHEPTS));
977: PetscCall(DSRegister(DSSVD,DSCreate_SVD));
978: PetscCall(DSRegister(DSHSVD,DSCreate_HSVD));
979: PetscCall(DSRegister(DSGSVD,DSCreate_GSVD));
980: PetscCall(DSRegister(DSPEP,DSCreate_PEP));
981: PetscCall(DSRegister(DSNEP,DSCreate_NEP));
982: PetscFunctionReturn(PETSC_SUCCESS);
983: }