/* Program usage:  mpirun -np <procs> ex2 [-help] [all PETSc options] */

static char help[] = "Solves a linear system in parallel with KSP.\n\
Input parameters include:\n\
  -random_exact_sol : use a random exact solution vector\n\
  -view_exact_sol   : write exact solution vector to stdout\n\
  -m <mesh_x>       : number of mesh points in x-direction\n\
  -n <mesh_y>       : number of mesh points in y-direction\n\n";

/*T
   Concepts: KSP^basic parallel example;
   Concepts: KSP^Laplacian, 2d
   Concepts: Laplacian, 2d
   Processors: n
T*/

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petsc.h       - base PETSc routines   petscvec.h - vectors
     petscsys.h    - system routines       petscmat.h - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include "petscksp.h"

#undef __FUNCT__
#define __FUNCT__ "main"
int main(int argc,char **args)
{
  Vec            x,b,u;   /* approx solution, RHS, exact solution */
  Mat            A;       /* linear system matrix */
  KSP            ksp;     /* linear solver context */
  PetscRandom    rctx;    /* random number generator context */
  PetscReal      norm;    /* norm of solution error */
  PetscInt       i,j,I,J,Istart,Iend,m = 8,n = 7,its;
  PetscErrorCode ierr;
  PetscTruth     flg;
  PetscScalar    v,one = 1.0,neg_one = -1.0;

  PetscInitialize(&argc,&args,(char *)0,help);
  ierr = PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);CHKERRQ(ierr);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime.  Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.

     Performance tuning note:  For problems of substantial size,
     preallocation of matrix memory is crucial for attaining good
     performance.  See the matrix chapter of the users manual for details.
  */
  ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
  ierr = MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);CHKERRQ(ierr);
  ierr = MatSetFromOptions(A);CHKERRQ(ierr);
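  /*
     Preallocation illustration: for this five-point stencil each row has
     at most 5 nonzeros, so with an AIJ matrix one could preallocate here
     (after MatSetFromOptions() has set the matrix type) with calls such as

         ierr = MatMPIAIJSetPreallocation(A,5,PETSC_NULL,5,PETSC_NULL);CHKERRQ(ierr);
         ierr = MatSeqAIJSetPreallocation(A,5,PETSC_NULL);CHKERRQ(ierr);

     The counts of 5 per row (for both the diagonal and off-diagonal blocks
     of the parallel matrix) are a safe overestimate for this example, not
     a general recipe.
  */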
  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.  Determine which
     rows of the matrix are locally owned.
  */
  ierr = MatGetOwnershipRange(A,&Istart,&Iend);CHKERRQ(ierr);

  /*
     Set matrix elements for the 2-D, five-point stencil in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.

     Note: this uses the less common natural ordering that orders first
     all the unknowns for x = h then for x = 2h etc; Hence you see J = I +- n
     instead of J = I +- m as you might expect.  The more standard ordering
     would first do all variables for y = h, then y = 2h etc.
  */
  for (I=Istart; I<Iend; I++) {
    v = -1.0; i = I/n; j = I - i*n;
    if (i>0)   {J = I - n; ierr = MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);}
    if (i<m-1) {J = I + n; ierr = MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);}
    if (j>0)   {J = I - 1; ierr = MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);}
    if (j<n-1) {J = I + 1; ierr = MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);}
    v = 4.0; ierr = MatSetValues(A,1,&I,1,&I,&v,INSERT_VALUES);CHKERRQ(ierr);
  }

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /*
     Create parallel vectors.
      - We form 1 vector from scratch and then duplicate as needed.
      - When using VecCreate(), VecSetSizes() and VecSetFromOptions()
        in this example, we specify only the vector's global dimension;
        the parallel partitioning is determined at runtime.
      - When solving a linear system, the vectors and matrices MUST
        be partitioned accordingly.  PETSc automatically generates
        appropriately partitioned matrices and vectors when MatCreate()
        and VecCreate() are used with the same communicator.
      - The user can alternatively specify the local dimensions when more
        sophisticated partitioning is needed (replacing the PETSC_DECIDE
        argument in the VecSetSizes() statement below).
  */
  ierr = VecCreate(PETSC_COMM_WORLD,&u);CHKERRQ(ierr);
  ierr = VecSetSizes(u,PETSC_DECIDE,m*n);CHKERRQ(ierr);
  ierr = VecSetFromOptions(u);CHKERRQ(ierr);
  ierr = VecDuplicate(u,&b);CHKERRQ(ierr);
  ierr = VecDuplicate(b,&x);CHKERRQ(ierr);

  /*
     Set exact solution; then compute right-hand-side vector.
     By default we use an exact solution vector with all elements
     equal to 1.0; alternatively, the runtime option -random_exact_sol
     forms a solution vector with random components.
  */
  ierr = PetscOptionsHasName(PETSC_NULL,"-random_exact_sol",&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscRandomCreate(PETSC_COMM_WORLD,&rctx);CHKERRQ(ierr);
    ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr);
    ierr = VecSetRandom(u,rctx);CHKERRQ(ierr);
    ierr = PetscRandomDestroy(rctx);CHKERRQ(ierr);
  } else {
    ierr = VecSet(u,one);CHKERRQ(ierr);
  }
  ierr = MatMult(A,u,b);CHKERRQ(ierr);

  /*
     View the exact solution vector if desired
  */
  ierr = PetscOptionsHasName(PETSC_NULL,"-view_exact_sol",&flg);CHKERRQ(ierr);
  if (flg) {ierr = VecView(u,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);}

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create linear solver context
  */
  ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);

  /*
     Set operators.  Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  ierr = KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);

  /*
     Set linear solver defaults for this problem (optional).
     - By extracting the KSP and PC contexts from the KSP context,
       we can then directly call any KSP and PC routines to set
       various options.
     - The following statement is optional; all of these parameters
       could alternatively be specified at runtime via
       KSPSetFromOptions().  All of these defaults can be overridden
       at runtime, as indicated below.
  */
  ierr = KSPSetTolerances(ksp,1.e-2/((m+1)*(n+1)),1.e-50,PETSC_DEFAULT,
                          PETSC_DEFAULT);CHKERRQ(ierr);

  /*
    Set runtime options, e.g.,
        -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
    These options will override those specified above as long as
    KSPSetFromOptions() is called _after_ any other customization
    routines.
  */
  ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Check the error
  */
  ierr = VecAXPY(x,neg_one,u);CHKERRQ(ierr);
  ierr = VecNorm(x,NORM_2,&norm);CHKERRQ(ierr);
  ierr = KSPGetIterationNumber(ksp,&its);CHKERRQ(ierr);
  /* Scale the norm */
  /* norm *= sqrt(1.0/((m+1)*(n+1))); */

  /*
     Print convergence information.  PetscPrintf() produces a single
     print statement from all processes that share a communicator.
     An alternative is PetscFPrintf(), which prints to a file.
  */
  ierr = PetscPrintf(PETSC_COMM_WORLD,"Norm of error %A iterations %D\n",
                     norm,its);CHKERRQ(ierr);

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  ierr = KSPDestroy(ksp);CHKERRQ(ierr);
  ierr = VecDestroy(u);CHKERRQ(ierr);
  ierr = VecDestroy(x);CHKERRQ(ierr);
  ierr = VecDestroy(b);CHKERRQ(ierr);
  ierr = MatDestroy(A);CHKERRQ(ierr);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_summary).
  */
  ierr = PetscFinalize();CHKERRQ(ierr);
  return 0;
}
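/*
   Example invocation (the process count, mesh sizes, and solver options
   below are placeholders; any KSP/PC runtime options may be substituted):

       mpirun -np 4 ex2 -m 32 -n 32 -ksp_type cg -pc_type jacobi -ksp_monitor
*/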