Actual source code: party.c

#define PETSCMAT_DLL

#include "src/mat/impls/adj/mpi/mpiadj.h"

#ifdef PETSC_HAVE_UNISTD_H
#include <unistd.h>
#endif

#ifdef PETSC_HAVE_STDLIB_H
#include <stdlib.h>
#endif

#include "petscfix.h"

/*
   Currently using Party-1.99
*/

#include "party_lib.h"

typedef struct {
    char redm[15];
    char redo[15];
    int rec;
    int output;
    char global_method[15];     /* global method */
    char local_method[15];      /* local method */
    int nbvtxcoarsed;           /* number of vertices for the coarse graph */
    char *mesg_log;
} MatPartitioning_Party;

#define SIZE_LOG 10000          /* size of buffer for mesg_log */

static PetscErrorCode MatPartitioningApply_Party(MatPartitioning part, IS * partitioning)
{
    PetscErrorCode ierr;
    int  *locals, *parttab = NULL, rank, size;
    Mat mat = part->adj, matMPI, matSeq;
    int nb_locals;
    Mat_MPIAdj *adj = (Mat_MPIAdj *) mat->data;
    MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;
    PetscTruth flg;
#ifdef PETSC_HAVE_UNISTD_H
    int fd_stdout, fd_pipe[2], count;
#endif

    /* check if the matrix is sequential, use MatGetSubMatrices if necessary */
    PetscTypeCompare((PetscObject) mat, MATMPIADJ, &flg);
    MPI_Comm_size(mat->comm, &size);
    MPI_Comm_rank(part->comm, &rank);
    if (size > 1) {
        int M, N;
        IS isrow, iscol;
        Mat *A;

        if (flg)
          SETERRQ(PETSC_ERR_SUP,"Distributed matrix format MPIAdj is not supported for sequential partitioners");
        PetscPrintf(part->comm,"Converting distributed matrix to sequential: this could be a performance loss\n");
        MatGetSize(mat, &M, &N);
        ISCreateStride(PETSC_COMM_SELF, M, 0, 1, &isrow);
        ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol);
        MatGetSubMatrices(mat, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &A);
        ISDestroy(isrow);
        ISDestroy(iscol);
        matSeq = *A;
        PetscFree(A);
    } else
        matSeq = mat;

    /* make sure the graph is in MPIADJ format, the only input format
       supported here, and store the result in matMPI */

    if (!flg) {
        MatConvert(matSeq, MATMPIADJ, MAT_INITIAL_MATRIX, &matMPI);
    } else {
        matMPI = matSeq;
    }

    adj = (Mat_MPIAdj *) matMPI->data;  /* finally adj contains the adjacency graph */
    {
        /* Party library arguments definition */
        int n = mat->M;         /* number of vertices in full graph */
        int *edge_p = adj->i;   /* start of edge list for each vertex */
        int *edge = adj->j;     /* edge list data */
        int *vertex_w = NULL;   /* weights for all vertices */
        int *edge_w = NULL;     /* weights for all edges */
        float *x = NULL, *y = NULL, *z = NULL;  /* coordinates for inertial method */
        int p = part->n;        /* number of parts to create */
        int *part_party;        /* part number of each vertex (length n) */
        int cutsize;            /* number of edges cut */
        char *global = party->global_method;    /* global partitioning algorithm */
        char *local = party->local_method;      /* local partitioning algorithm */
        int redl = party->nbvtxcoarsed; /* how many vertices to coarsen down to */
        char *redm = party->redm;
        char *redo = party->redo;
        int rec = party->rec;
        int output = party->output;

        PetscMalloc((mat->M) * sizeof(int), &part_party);

        /* redirect output to buffer party->mesg_log */
#ifdef PETSC_HAVE_UNISTD_H
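        /* Save the current stdout descriptor, then point fd 1 at the write
           end of a pipe so that everything the Party library prints is
           captured and can be read back into party->mesg_log below. */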
        fd_stdout = dup(1);
        pipe(fd_pipe);
        close(1);
        dup2(fd_pipe[1], 1);
        PetscMalloc(SIZE_LOG * sizeof(char), &(party->mesg_log));
#endif

        /* library call */
        party_lib_times_start();
        ierr = party_lib(n, vertex_w, x, y, z, edge_p, edge, edge_w,
            p, part_party, &cutsize, redl, redm, redo,
            global, local, rec, output);

        party_lib_times_output(output);
        part_info(n, vertex_w, edge_p, edge, edge_w, p, part_party, output);

#ifdef PETSC_HAVE_UNISTD_H
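        /* Drain the captured output from the pipe into mesg_log,
           NUL-terminate it, and restore the original stdout. */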
        fflush(stdout);
        count = read(fd_pipe[0], party->mesg_log, (SIZE_LOG - 1) * sizeof(char));
        if (count < 0)
            count = 0;
        party->mesg_log[count] = 0;
        close(1);
        dup2(fd_stdout, 1);
        close(fd_stdout);
        close(fd_pipe[0]);
        close(fd_pipe[1]);
#endif
        /* report any error returned by the library call */
        if (ierr) SETERRQ(PETSC_ERR_LIB, party->mesg_log);
        parttab = part_party;
    }

    /* Create the index set: each process owns a contiguous block of
       mat->M / size entries of parttab, and the first mat->M % size
       ranks take one extra entry each */
    MPI_Comm_rank(part->comm, &rank);
    MPI_Comm_size(part->comm, &size);
    nb_locals = mat->M / size;
    locals = parttab + rank * nb_locals;
    if (rank < mat->M % size) {
        nb_locals++;
        locals += rank;
    } else {
        locals += mat->M % size;
    }
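    /* For example, with mat->M = 10 and size = 4 the ranks own 3, 3, 2, 2
       entries of parttab, starting at offsets 0, 3, 6, 8 respectively */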
    ISCreateGeneral(part->comm, nb_locals, locals, partitioning);

    /* clean up temporary objects */
    PetscFree(parttab);
    if (matSeq != mat) {
        MatDestroy(matSeq);
    }
    if (matMPI != mat) {
        MatDestroy(matMPI);
    }
    return(0);
}
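
/* A minimal sketch of the typical calling sequence for this partitioner.
   The names A (a MATMPIADJ adjacency matrix) and nparts are assumptions,
   and error checking is omitted:

       MatPartitioning part;
       IS              is;

       MatPartitioningCreate(PETSC_COMM_WORLD, &part);
       MatPartitioningSetAdjacency(part, A);
       MatPartitioningSetType(part, MAT_PARTITIONING_PARTY);
       MatPartitioningSetNParts(part, nparts);
       MatPartitioningApply(part, &is);
       ISDestroy(is);
       MatPartitioningDestroy(part);
*/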

PetscErrorCode MatPartitioningView_Party(MatPartitioning part, PetscViewer viewer)
{
  MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;
  PetscMPIInt           rank;
  PetscTruth            iascii;

  MPI_Comm_rank(part->comm, &rank);
  PetscTypeCompare((PetscObject) viewer, PETSC_VIEWER_ASCII, &iascii);
  if (iascii) {
    if (!rank && party->mesg_log) {
      PetscViewerASCIIPrintf(viewer, "%s\n", party->mesg_log);
    }
  } else {
    SETERRQ1(PETSC_ERR_SUP, "Viewer type %s not supported for this Party partitioner",((PetscObject) viewer)->type_name);
  }
  return(0);
}

/*@C
   MatPartitioningPartySetGlobal - Set method for global partitioning.

   Input Parameters:
+  part - the partitioning context
-  global - may be one of MP_PARTY_OPT, MP_PARTY_LIN, MP_PARTY_SCA,
    MP_PARTY_RAN, MP_PARTY_GBF, MP_PARTY_GCF, MP_PARTY_BUB or MP_PARTY_DEF, or
    alternatively a string describing the method. Two or more methods can be
    combined like "gbf,gcf". Check the Party Library Users Manual for details.

   Level: advanced

@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatPartitioningPartySetGlobal(MatPartitioning part, const char *global)
{
    MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;

    PetscStrcpy(party->global_method, global);
    return(0);
}
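
/* Example (sketch): combine two global strategies, as the manual page above
   describes:
       MatPartitioningPartySetGlobal(part, "gbf,gcf");
*/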

/*@C
   MatPartitioningPartySetLocal - Set method for local partitioning.

   Input Parameters:
+  part - the partitioning context
-  local - one of MP_PARTY_HELPFUL_SETS, MP_PARTY_KERNIGHAN_LIN, or MP_PARTY_NONE.
    Check the Party Library Users Manual for details.

   Level: advanced

@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatPartitioningPartySetLocal(MatPartitioning part, const char *local)
{
    MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;

    PetscStrcpy(party->local_method, local);
    return(0);
}
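
/* Example (sketch): select Kernighan-Lin local refinement:
       MatPartitioningPartySetLocal(part, "kl");
*/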

/*@
   MatPartitioningPartySetCoarseLevel - Set the coarse level.

   Input Parameters:
+  part - the partitioning context
-  level - the coarse level in range [0.0,1.0]

   Level: advanced

@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatPartitioningPartySetCoarseLevel(MatPartitioning part, PetscReal level)
{
    MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;

    if (level < 0 || level > 1.0) {
        SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Party: level of coarsening out of range [0.0,1.0]");
    } else {
        party->nbvtxcoarsed = (int)(part->adj->N * level);
    }
    if (party->nbvtxcoarsed < 20) party->nbvtxcoarsed = 20;
    return(0);
}
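
/* Example: for a graph with part->adj->N = 1000 vertices, a level of 0.5
   gives nbvtxcoarsed = 500; any result below 20 is clamped to 20:
       MatPartitioningPartySetCoarseLevel(part, 0.5);
*/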

/*@
   MatPartitioningPartySetMatchOptimization - Activate matching optimization for graph reduction.

   Input Parameters:
+  part - the partitioning context
-  opt - activate optimization

   Level: advanced

@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatPartitioningPartySetMatchOptimization(MatPartitioning part, PetscTruth opt)
{
    MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;

    if (opt)
        PetscStrcpy(party->redo, "w3");
    else
        PetscStrcpy(party->redo, "");
    return(0);
}
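
/* Example (sketch): enable matching optimization, which selects the "w3"
   reduction option:
       MatPartitioningPartySetMatchOptimization(part, PETSC_TRUE);
*/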

/*@
   MatPartitioningPartySetBipart - Activate or deactivate recursive bisection.

   Input Parameters:
+  part - the partitioning context
-  bp - PETSC_TRUE to activate recursive bisection

   Level: advanced

@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatPartitioningPartySetBipart(MatPartitioning part, PetscTruth bp)
{
    MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;

    if (bp)
        party->rec = 1;
    else
        party->rec = 0;
    return(0);
}
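
/* Example (sketch): disable recursive bisection:
       MatPartitioningPartySetBipart(part, PETSC_FALSE);
*/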

PetscErrorCode MatPartitioningSetFromOptions_Party(MatPartitioning part)
{
    PetscTruth flag, b;
    char value[15];
    PetscReal r;

    PetscOptionsHead("Set Party partitioning options");

    PetscOptionsString("-mat_partitioning_party_global",
        "Global method to use", "MatPartitioningPartySetGlobal", "gcf,gbf",
        value, 15, &flag);
    if (flag)
        MatPartitioningPartySetGlobal(part, value);

    PetscOptionsString("-mat_partitioning_party_local",
        "Local method to use", "MatPartitioningPartySetLocal", "kl", value, 15,
        &flag);
    if (flag)
        MatPartitioningPartySetLocal(part, value);

    PetscOptionsReal("-mat_partitioning_party_coarse_level",
        "Coarse level", "MatPartitioningPartySetCoarseLevel", 0, &r,
        &flag);
    if (flag)
        MatPartitioningPartySetCoarseLevel(part, r);

    PetscOptionsTruth("-mat_partitioning_party_match_optimization",
        "Matching optimization on/off (boolean)",
        "MatPartitioningPartySetMatchOptimization", PETSC_TRUE, &b, &flag);
    if (flag)
        MatPartitioningPartySetMatchOptimization(part, b);

    PetscOptionsTruth("-mat_partitioning_party_bipart",
        "Bipartitioning option on/off (boolean)",
        "MatPartitioningPartySetBipart", PETSC_TRUE, &b, &flag);
    if (flag)
        MatPartitioningPartySetBipart(part, b);

    PetscOptionsTail();
    return(0);
}
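
/* The equivalent settings can be given on the command line (a sketch,
   assuming an executable ./ex that creates a Party partitioning context):
       ./ex -mat_partitioning_party_global gbf,gcf \
            -mat_partitioning_party_local kl \
            -mat_partitioning_party_coarse_level 0.5 \
            -mat_partitioning_party_bipart false
*/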


PetscErrorCode MatPartitioningDestroy_Party(MatPartitioning part)
{
    MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;

    PetscFree(party->mesg_log);
    PetscFree(party);
    return(0);
}

/*@C
   MAT_PARTITIONING_PARTY - Creates a partitioning context via the external package Party.

   Collective on MPI_Comm

   Input Parameter:
.  part - the partitioning context

   Options Database Keys:
+  -mat_partitioning_party_global <gcf,gbf> - Global method to use (MatPartitioningPartySetGlobal)
.  -mat_partitioning_party_local <kl> - Local method to use (MatPartitioningPartySetLocal)
.  -mat_partitioning_party_coarse_level <0> - Coarse level (MatPartitioningPartySetCoarseLevel)
.  -mat_partitioning_party_match_optimization <true> - Matching optimization on/off (boolean) (MatPartitioningPartySetMatchOptimization)
-  -mat_partitioning_party_bipart <true> - Bipartitioning option on/off (boolean) (MatPartitioningPartySetBipart)

   Level: beginner

   Notes: See http://wwwcs.upb.de/fachbereich/AG/monien/RESEARCH/PART/party.html

.keywords: Partitioning, create, context

.seealso: MatPartitioningSetType(), MatPartitioningType

@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatPartitioningCreate_Party(MatPartitioning part)
{
    MatPartitioning_Party *party;

    PetscNew(MatPartitioning_Party, &party);

    PetscStrcpy(party->global_method, "gcf,gbf");
    PetscStrcpy(party->local_method, "kl");
    PetscStrcpy(party->redm, "lam");
    PetscStrcpy(party->redo, "w3");

    party->nbvtxcoarsed = 200;
    party->rec = 1;
    party->output = 1;
    party->mesg_log = NULL;

    part->ops->apply = MatPartitioningApply_Party;
    part->ops->view = MatPartitioningView_Party;
    part->ops->destroy = MatPartitioningDestroy_Party;
    part->ops->setfromoptions = MatPartitioningSetFromOptions_Party;
    part->data = (void*) party;
    return(0);
}