static char help[] = "Illustrates use of the preconditioner ASM.\n\
The Additive Schwarz Method for solving a linear system in parallel with KSP.  The\n\
code indicates the procedure for setting user-defined subdomains.  Input\n\
parameters include:\n\
  -user_set_subdomain_solvers:  User explicitly sets subdomain solvers\n\
  -user_set_subdomains:  Activate user-defined subdomains\n\n";

/*
   Note:  This example focuses on setting the subdomains for the ASM
   preconditioner for a problem on a 2D rectangular grid.  See ex1.c
   and ex2.c for more detailed comments on the basic usage of KSP
   (including working with matrices and vectors).

   The ASM preconditioner is fully parallel, but currently the routine
   PCASMCreateSubdomains2D(), which is used in this example to demonstrate
   user-defined subdomains (activated via -user_set_subdomains), is
   uniprocessor only.

   The matrix in this linear system arises from the discretized Laplacian,
   and thus is not very interesting in terms of experimenting with variants
   of the ASM preconditioner.
*/

/*T
   Concepts: KSP^Additive Schwarz Method (ASM) with user-defined subdomains
   Processors: n
T*/

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petsc.h       - base PETSc routines   petscvec.h - vectors
     petscsys.h    - system routines       petscmat.h - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include "petscksp.h"

#undef __FUNCT__
#define __FUNCT__ "main"
int main(int argc,char **args)
{
  Vec            x,b,u;           /* approx solution, RHS, exact solution */
  Mat            A;               /* linear system matrix */
  KSP            ksp;             /* linear solver context */
  PC             pc;              /* PC context */
  IS             *is;             /* array of index sets that define the subdomains */
  PetscInt       overlap = 1;     /* width of subdomain overlap */
  PetscInt       Nsub;            /* number of subdomains */
  PetscInt       m = 15,n = 17;   /* mesh dimensions in x- and y- directions */
  PetscInt       M = 2,N = 1;     /* number of subdomains in x- and y- directions */
  PetscInt       i,j,I,J,Istart,Iend;
  PetscErrorCode ierr;
  PetscMPIInt    size;
  PetscTruth     flg;
  PetscTruth     user_subdomains; /* flag - 1 indicates user-defined subdomains */
  PetscScalar    v, one = 1.0;

  PetscInitialize(&argc,&args,(char *)0,help);
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
  ierr = PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetInt(PETSC_NULL,"-M",&M,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetInt(PETSC_NULL,"-N",&N,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetInt(PETSC_NULL,"-overlap",&overlap,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(PETSC_NULL,"-user_set_subdomains",&user_subdomains);CHKERRQ(ierr);

  /* -------------------------------------------------------------------
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     ------------------------------------------------------------------- */
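  /*
     The assembly below uses the natural (row-major) ordering of the m x n
     grid: global row I corresponds to grid point (i,j) with i = I/n and
     j = I - i*n.  Each row holds the standard 5-point Laplacian stencil,
     i.e. 4.0 on the diagonal and -1.0 for each neighboring grid point
     that exists.
  */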
  /*
     Assemble the matrix for the five point stencil, YET AGAIN
  */
  ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
  ierr = MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);CHKERRQ(ierr);
  ierr = MatSetFromOptions(A);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&Istart,&Iend);CHKERRQ(ierr);
  for (I=Istart; I<Iend; I++) {
    v = -1.0; i = I/n; j = I - i*n;
    if (i>0)   {J = I - n; ierr = MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);}
    if (i<m-1) {J = I + n; ierr = MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);}
    if (j>0)   {J = I - 1; ierr = MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);}
    if (j<n-1) {J = I + 1; ierr = MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);}
    v = 4.0; ierr = MatSetValues(A,1,&I,1,&I,&v,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /*
     Create and set vectors
  */
  ierr = VecCreate(PETSC_COMM_WORLD,&b);CHKERRQ(ierr);
  ierr = VecSetSizes(b,PETSC_DECIDE,m*n);CHKERRQ(ierr);
  ierr = VecSetFromOptions(b);CHKERRQ(ierr);
  ierr = VecDuplicate(b,&u);CHKERRQ(ierr);
  ierr = VecDuplicate(b,&x);CHKERRQ(ierr);
  ierr = VecSet(u,one);CHKERRQ(ierr);
  ierr = MatMult(A,u,b);CHKERRQ(ierr);

  /*
     Create linear solver context
  */
  ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  ierr = KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);

  /*
     Set the default preconditioner for this program to be ASM
  */
  ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
  ierr = PCSetType(pc,PCASM);CHKERRQ(ierr);

  /* -------------------------------------------------------------------
                   Define the problem decomposition
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of many users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Set the overlap, using the default PETSc decomposition via
         PCASMSetOverlap(pc,overlap);
     Could instead use the option -pc_asm_overlap <ovl>

     Set the total number of blocks via -pc_asm_blocks <blks>
     Note:  The ASM default is to use 1 block per processor.  To
     experiment on a single processor with various overlaps, you
     must specify use of multiple blocks!
  */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       More advanced method, setting user-defined subdomains
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Firstly, create index sets that define the subdomains.  The utility
     routine PCASMCreateSubdomains2D() is a simple example (that currently
     supports 1 processor only!).  More generally, the user should write
     a custom routine for a particular problem geometry.

     Then call either PCASMSetLocalSubdomains() or PCASMSetTotalSubdomains()
     to set the subdomains for the ASM preconditioner.
  */

  if (!user_subdomains) { /* basic version */
    ierr = PCASMSetOverlap(pc,overlap);CHKERRQ(ierr);
  } else { /* advanced version */
    if (size != 1) SETERRQ(1,"PCASMCreateSubdomains2D() is currently a uniprocessor routine only!");
    ierr = PCASMCreateSubdomains2D(m,n,M,N,1,overlap,&Nsub,&is);CHKERRQ(ierr);
    ierr = PCASMSetLocalSubdomains(pc,Nsub,is);CHKERRQ(ierr);
  }

  /* -------------------------------------------------------------------
                Set the linear solvers for the subblocks
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of most users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     By default, the ASM preconditioner uses the same solver on each
     block of the problem.  To set the same solver options on all blocks,
     use the prefix -sub before the usual PC and KSP options, e.g.,
          -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4

     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        Advanced method, setting different solvers for various blocks.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Note that each block's KSP context is completely independent of
     the others, and the full range of uniprocessor KSP options is
     available for each block.

     - Use PCASMGetSubKSP() to extract the array of KSP contexts for
       the local blocks.
     - See ex7.c for a simple example of setting different linear solvers
       for the individual blocks for the block Jacobi method (which is
       equivalent to the ASM method with zero overlap).
  */
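  /*
     For instance, a possible invocation combining these options (the
     executable name "ex8" and the MPI launcher are only illustrative and
     depend on the local build):

         mpiexec -n 4 ./ex8 -pc_asm_overlap 2 -sub_pc_type ilu -sub_ksp_type gmres -sub_ksp_rtol 1.e-4

     or, on a single processor with user-defined subdomains:

         ./ex8 -user_set_subdomains -M 2 -N 2 -overlap 1
  */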
  ierr = PetscOptionsHasName(PETSC_NULL,"-user_set_subdomain_solvers",&flg);CHKERRQ(ierr);
  if (flg) {
    KSP        *subksp;       /* array of KSP contexts for local subblocks */
    PetscInt   nlocal,first;  /* number of local subblocks, first local subblock */
    PC         subpc;         /* PC context for subblock */
    PetscTruth isasm;

    ierr = PetscPrintf(PETSC_COMM_WORLD,"User explicitly sets subdomain solvers.\n");CHKERRQ(ierr);

    /*
       Set runtime options
    */
    ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);

    /*
       Flag an error if PCTYPE is changed from the runtime options
    */
    ierr = PetscTypeCompare((PetscObject)pc,PCASM,&isasm);CHKERRQ(ierr);
    if (!isasm) {
      SETERRQ(1,"Cannot change the PCTYPE when manually changing the subdomain solver settings");
    }

    /*
       Call KSPSetUp() to set the block Jacobi data structures (including
       creation of an internal KSP context for each block).

       Note: KSPSetUp() MUST be called before PCASMGetSubKSP().
    */
    ierr = KSPSetUp(ksp);CHKERRQ(ierr);

    /*
       Extract the array of KSP contexts for the local blocks
    */
    ierr = PCASMGetSubKSP(pc,&nlocal,&first,&subksp);CHKERRQ(ierr);

    /*
       Loop over the local blocks, setting various KSP options
       for each block.
    */
    for (i=0; i<nlocal; i++) {
      ierr = KSPGetPC(subksp[i],&subpc);CHKERRQ(ierr);
      ierr = PCSetType(subpc,PCILU);CHKERRQ(ierr);
      ierr = KSPSetType(subksp[i],KSPGMRES);CHKERRQ(ierr);
      ierr = KSPSetTolerances(subksp[i],1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);CHKERRQ(ierr);
    }