Actual source code: partition.c

  1: #define PETSCMAT_DLL

 3:  #include "src/mat/matimpl.h"

  5: /* Logging support */
  6: PetscCookie  MAT_PARTITIONING_COOKIE = 0;

  8: /*
  9:    Simplest partitioning, keeps the current partitioning.
 10: */
 13: static PetscErrorCode MatPartitioningApply_Current(MatPartitioning part,IS *partitioning)
 14: {
 16:   PetscInt       m;
 17:   PetscMPIInt    rank,size;

 20:   MPI_Comm_size(part->comm,&size);
 21:   if (part->n != size) {
 22:     SETERRQ(PETSC_ERR_SUP,"This is the DEFAULT NO-OP partitioner; it currently only supports one domain per processor\nuse -mat_partitioning_type parmetis, chaco, or scotch for more than one subdomain per processor");
 23:   }
 24:   MPI_Comm_rank(part->comm,&rank);

 26:   MatGetLocalSize(part->adj,&m,PETSC_NULL);
 27:   ISCreateStride(part->comm,m,rank,0,partitioning);
 28:   return(0);
 29: }

 33: static PetscErrorCode MatPartitioningApply_Square(MatPartitioning part,IS *partitioning)
 34: {
 36:   PetscInt       cell,n,N,p,rstart,rend,*color;
 37:   PetscMPIInt    size;

 40:   MPI_Comm_size(part->comm,&size);
 41:   if (part->n != size) {
 42:     SETERRQ(PETSC_ERR_SUP,"Currently only supports one domain per processor");
 43:   }
 44:   p = (PetscInt)sqrt((double)part->n);
 45:   if (p*p != part->n) {
 46:     SETERRQ(PETSC_ERR_SUP,"Square partitioning requires \"perfect square\" number of domains");
 47:   }
 48:   MatGetSize(part->adj,&N,PETSC_NULL);
 49:   n = (PetscInt)sqrt((double)N);
 50:   if (n*n != N) {  /* This condition is NECESSARY, but NOT SUFFICIENT, for the domain to be square */
 51:     SETERRQ(PETSC_ERR_SUP,"Square partitioning requires square domain");
 52:   }
 53:   if (n%p != 0) {
 54:     SETERRQ(PETSC_ERR_SUP,"Square partitioning requires p to divide n");
 55:   }
 56:   MatGetOwnershipRange(part->adj,&rstart,&rend);
 57:   PetscMalloc((rend-rstart)*sizeof(PetscInt),&color);
 58:   /* for (int cell=rstart; cell<rend; cell++) { color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); } */
 59:   for (cell=rstart; cell<rend; cell++) {
 60:     color[cell-rstart] = ((cell%n) / (n/p)) + p * ((cell/n) / (n/p));
 61:   }
 62:   ISCreateGeneral(part->comm,rend-rstart,color,partitioning);
 63:   PetscFree(color);

 65:   return(0);
 66: }
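/*
   Worked example of the coloring formula above (illustration only, not part of the build):
   for a 4x4 grid (N = 16, so n = 4) split over part->n = 4 domains (p = 2, n/p = 2),
   cell c receives color ((c%n)/(n/p)) + p*((c/n)/(n/p)):

        cells (row-major)            colors
          0  1  2  3                 0 0 1 1
          4  5  6  7                 0 0 1 1
          8  9 10 11                 2 2 3 3
         12 13 14 15                 2 2 3 3

   i.e. each of the p*p domains is one contiguous (n/p) x (n/p) block of the grid.
*/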

 71: PetscErrorCode  MatPartitioningCreate_Current(MatPartitioning part)
 72: {
 74:   part->ops->apply   = MatPartitioningApply_Current;
 75:   part->ops->view    = 0;
 76:   part->ops->destroy = 0;
 77:   return(0);
 78: }

 84: PetscErrorCode  MatPartitioningCreate_Square(MatPartitioning part)
 85: {
 87:   part->ops->apply   = MatPartitioningApply_Square;
 88:   part->ops->view    = 0;
 89:   part->ops->destroy = 0;
 90:   return(0);
 91: }

 94: /* ===========================================================================================*/

 96:  #include "petscsys.h"

 98: PetscFList MatPartitioningList = 0;
 99: PetscTruth MatPartitioningRegisterAllCalled = PETSC_FALSE;


104: PetscErrorCode  MatPartitioningRegister(const char sname[],const char path[],const char name[],PetscErrorCode (*function)(MatPartitioning))
105: {
107:   char fullname[PETSC_MAX_PATH_LEN];

110:   PetscFListConcat(path,name,fullname);
111:   PetscFListAdd(&MatPartitioningList,sname,fullname,(void (*)(void))function);
112:   return(0);
113: }
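/*
   Usage sketch for MatPartitioningRegister() (error checking omitted; the creation
   routine MatPartitioningCreate_MyPart is a hypothetical user-supplied function, and
   0 is passed as the dynamic-library path):

     extern PetscErrorCode MatPartitioningCreate_MyPart(MatPartitioning);

     MatPartitioningRegister("mypart",0,"MatPartitioningCreate_MyPart",MatPartitioningCreate_MyPart);

   After registration the new partitioner can be selected with
   MatPartitioningSetType(part,"mypart") or -mat_partitioning_type mypart.
   Application code normally registers through the MatPartitioningRegisterDynamic() macro.
*/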

117: /*@C
118:    MatPartitioningRegisterDestroy - Frees the list of partitioning routines.

120:   Not Collective

122:   Level: developer

124: .keywords: matrix, register, destroy

126: .seealso: MatPartitioningRegisterDynamic(), MatPartitioningRegisterAll()
127: @*/
128: PetscErrorCode  MatPartitioningRegisterDestroy(void)
129: {

133:   if (MatPartitioningList) {
134:     PetscFListDestroy(&MatPartitioningList);
135:     MatPartitioningList = 0;
136:   }
137:   return(0);
138: }

142: /*@C
143:    MatPartitioningGetType - Gets the Partitioning method type and name (as a string) 
144:         from the partitioning context.

146:    Not collective

148:    Input Parameter:
149: .  partitioning - the partitioning context

151:    Output Parameter:
152: .  type - partitioner type

154:    Level: intermediate

158: .keywords: Partitioning, get, method, name, type
159: @*/
160: PetscErrorCode  MatPartitioningGetType(MatPartitioning partitioning,MatPartitioningType *type)
161: {
163:   *type = partitioning->type_name;
164:   return(0);
165: }

169: /*@C
170:    MatPartitioningSetNParts - Sets how many partitions are to be created;
171:         by default this is one per processor. Certain partitioning schemes may
172:         in fact only support that option.

174:    Not collective

176:    Input Parameters:
177: +  part - the partitioning context
178: -  n - the number of partitions

180:    Level: intermediate

184: .keywords: Partitioning, set

186: .seealso: MatPartitioningCreate(), MatPartitioningApply()
187: @*/
188: PetscErrorCode  MatPartitioningSetNParts(MatPartitioning part,PetscInt n)
189: {
191:   part->n = n;
192:   return(0);
193: }
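/*
   Usage sketch (illustration only, error checking omitted; 'part' is assumed to be an
   existing MatPartitioning object).  Whether n != size is honoured depends on the chosen
   partitioner; the default no-op partitioner above requires one partition per process:

     PetscMPIInt size;

     MPI_Comm_size(PETSC_COMM_WORLD,&size);
     MatPartitioningSetNParts(part,4*size);
*/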

197: /*@
198:    MatPartitioningApply - Gets a partitioning for a matrix.

200:    Collective on Mat

202:    Input Parameter:
203: .  matp - the matrix partitioning object

205:    Output Parameter:
206: .   partitioning - the partitioning. For each local node this gives the number of the
207:                    processor to which that node is assigned.

209:    Options Database Keys:
210:    To specify the partitioning through the options database, use one of
211:    the following:
212: $    -mat_partitioning_type parmetis, -mat_partitioning_type current
213:    To see the partitioning result
214: $    -mat_partitioning_view

216:    Level: beginner

218:    The user can define additional partitionings; see MatPartitioningRegisterDynamic().

220: .keywords: matrix, get, partitioning

222: .seealso:  MatPartitioningRegisterDynamic(), MatPartitioningCreate(),
223:            MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
224:            ISPartitioningCount()
225: @*/
226: PetscErrorCode  MatPartitioningApply(MatPartitioning matp,IS *partitioning)
227: {
229:   PetscTruth flag;

234:   if (!matp->adj->assembled) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
235:   if (matp->adj->factor) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
236:   if (!matp->ops->apply) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Must set type with MatPartitioningSetFromOptions() or MatPartitioningSetType()");
238:   (*matp->ops->apply)(matp,partitioning);

241:   PetscOptionsHasName(PETSC_NULL,"-mat_partitioning_view",&flag);
242:   if (flag) {
243:     MatPartitioningView(matp,PETSC_VIEWER_STDOUT_(matp->comm));
244:     ISView(*partitioning,PETSC_VIEWER_STDOUT_(matp->comm));
245:   }
246:   return(0);
247: }
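/*
   Typical calling sequence (a sketch; error checking omitted and 'adj' is assumed to be
   an existing parallel adjacency matrix, for instance one built with MatCreateMPIAdj()):

     MatPartitioning part;
     IS              is;

     MatPartitioningCreate(PETSC_COMM_WORLD,&part);
     MatPartitioningSetAdjacency(part,adj);
     MatPartitioningSetFromOptions(part);
     MatPartitioningApply(part,&is);
        ... use the index set, e.g. with ISPartitioningToNumbering() or ISPartitioningCount() ...
     ISDestroy(is);
     MatPartitioningDestroy(part);

   Run with -mat_partitioning_type parmetis (if available) or current, and with
   -mat_partitioning_view to print the resulting partitioning.
*/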
248: 
251: /*@
252:    MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the problem to be
253:       partitioned.

255:    Collective on MatPartitioning and Mat

257:    Input Parameters:
258: +  part - the partitioning context
259: -  adj - the adjacency matrix

261:    Level: beginner

263: .keywords: Partitioning, adjacency

265: .seealso: MatPartitioningCreate()
266: @*/
267: PetscErrorCode  MatPartitioningSetAdjacency(MatPartitioning part,Mat adj)
268: {
272:   part->adj = adj;
273:   return(0);
274: }
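/*
   Sketch of providing the adjacency graph (error checking omitted).  The calling
   sequence assumed here for MatCreateMPIAdj() is (comm,m,N,ia,ja,values,&adj), with
   ia/ja the CSR row pointers and column indices of the locally owned rows; consult the
   MatCreateMPIAdj() man page for the exact arguments and the ownership rules for ia/ja:

     Mat      adj;
     PetscInt m,N,*ia,*ja;

     ... allocate ia (length m+1) and ja (length ia[m]) with PetscMalloc() and fill them ...
     MatCreateMPIAdj(PETSC_COMM_WORLD,m,N,ia,ja,PETSC_NULL,&adj);
     MatPartitioningSetAdjacency(part,adj);
*/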

278: /*@
279:    MatPartitioningDestroy - Destroys the partitioning context.

281:    Collective on Partitioning

283:    Input Parameter:
284: .  part - the partitioning context

286:    Level: beginner

288: .keywords: Partitioning, destroy, context

290: .seealso: MatPartitioningCreate()
291: @*/
292: PetscErrorCode  MatPartitioningDestroy(MatPartitioning part)
293: {

298:   if (--part->refct > 0) return(0);

300:   if (part->ops->destroy) {
301:     (*part->ops->destroy)(part);
302:   }
303:   PetscFree(part->vertex_weights);
304:   PetscFree(part->part_weights);
305:   PetscHeaderDestroy(part);
306:   return(0);
307: }

311: /*@C
312:    MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.

314:    Collective on Partitioning

316:    Input Parameters:
317: +  part - the partitioning context
318: -  weights - the weights

320:    Level: beginner

322:    Notes:
323:       The array weights is freed by PETSc so the user should not free the array. In C/C++
324:    the array must be obtained with a call to PetscMalloc(), not malloc().

326: .keywords: Partitioning, vertex weights

328: .seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetPartitionWeights()
329: @*/
330: PetscErrorCode  MatPartitioningSetVertexWeights(MatPartitioning part,const PetscInt weights[])
331: {


337:   PetscFree(part->vertex_weights);
338:   part->vertex_weights = (PetscInt*)weights;
339:   return(0);
340: }
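/*
   Usage sketch (error checking omitted): one weight per locally owned vertex of the
   adjacency graph is assumed here.  The array must come from PetscMalloc() because,
   as noted above, PETSc takes ownership and frees it:

     PetscInt *wgt,i,m;

     MatGetLocalSize(adj,&m,PETSC_NULL);
     PetscMalloc(m*sizeof(PetscInt),&wgt);
     for (i=0; i<m; i++) wgt[i] = 1;
     MatPartitioningSetVertexWeights(part,wgt);
*/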

344: /*@C
345:    MatPartitioningSetPartitionWeights - Sets the weights for each partition.

347:    Collective on Partitioning

349:    Input Parameters:
350: +  part - the partitioning context
351: -  weights - the weights

353:    Level: beginner

355:    Notes:
356:       The array weights is freed by PETSc so the user should not free the array. In C/C++
357:    the array must be obtained with a call to PetscMalloc(), not malloc().

359: .keywords: Partitioning, partition weights

361: .seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetVertexWeights()
362: @*/
363: PetscErrorCode  MatPartitioningSetPartitionWeights(MatPartitioning part,const PetscReal weights[])
364: {


370:   PetscFree(part->part_weights);
371:   part->part_weights = (PetscReal*)weights;
372:   return(0);
373: }
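/*
   Usage sketch (error checking omitted).  The interpretation of the entries is left to
   the particular partitioner; the assumption here is one weight per target partition
   (nparts, e.g. the value given to MatPartitioningSetNParts()) describing its desired
   share of the work, with the array again coming from PetscMalloc() since PETSc frees it:

     PetscReal *pw;
     PetscInt  i,nparts = 4;

     PetscMalloc(nparts*sizeof(PetscReal),&pw);
     for (i=0; i<nparts; i++) pw[i] = 1.0/nparts;
     MatPartitioningSetPartitionWeights(part,pw);
*/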

377: /*@
378:    MatPartitioningCreate - Creates a partitioning context.

380:    Collective on MPI_Comm

382:    Input Parameter:
383: .   comm - MPI communicator 

385:    Output Parameter:
386: .  newp - location to put the context

388:    Level: beginner

390: .keywords: Partitioning, create, context

392: .seealso: MatPartitioningSetType(), MatPartitioningApply(), MatPartitioningDestroy(),
393:           MatPartitioningSetAdjacency()

395: @*/
396: PetscErrorCode  MatPartitioningCreate(MPI_Comm comm,MatPartitioning *newp)
397: {
398:   MatPartitioning part;
399:   PetscErrorCode  ierr;
400:   PetscMPIInt     size;

403:   *newp          = 0;

405:   PetscHeaderCreate(part,_p_MatPartitioning,struct _MatPartitioningOps,MAT_PARTITIONING_COOKIE,-1,"MatPartitioning",comm,MatPartitioningDestroy,
406:                     MatPartitioningView);
407:   part->type           = -1;
408:   part->vertex_weights = PETSC_NULL;
409:   part->part_weights   = PETSC_NULL;
410:   MPI_Comm_size(comm,&size);
411:   part->n = (PetscInt)size;

413:   *newp = part;
414:   return(0);
415: }

419: /*@C 
420:    MatPartitioningView - Prints the partitioning data structure.

422:    Collective on MatPartitioning

424:    Input Parameters:
425: +  part - the partitioning context
426: -  viewer - optional visualization context

428:    Level: intermediate

430:    Note:
431:    The available visualization contexts include
432: +     PETSC_VIEWER_STDOUT_SELF - standard output (default)
433: -     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
434:          output where only the first processor opens
435:          the file.  All other processors send their 
436:          data to the first processor to print. 

438:    The user can open alternative visualization contexts with
439: .     PetscViewerASCIIOpen() - output to a specified file

441: .keywords: Partitioning, view

443: .seealso: PetscViewerASCIIOpen()
444: @*/
445: PetscErrorCode  MatPartitioningView(MatPartitioning part,PetscViewer viewer)
446: {
448:   PetscTruth          iascii;
449:   MatPartitioningType name;

453:   if (!viewer) viewer = PETSC_VIEWER_STDOUT_(part->comm);

457:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
458:   if (iascii) {
459:     MatPartitioningGetType(part,&name);
460:     PetscViewerASCIIPrintf(viewer,"MatPartitioning Object: %s\n",name);
461:     if (part->vertex_weights) {
462:       PetscViewerASCIIPrintf(viewer,"  Using vertex weights\n");
463:     }
464:   } else {
465:     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for this MatPartitioning",((PetscObject)viewer)->type_name);
466:   }

468:   if (part->ops->view) {
469:     PetscViewerASCIIPushTab(viewer);
470:     (*part->ops->view)(part,viewer);
471:     PetscViewerASCIIPopTab(viewer);
472:   }

474:   return(0);
475: }

479: /*@C
480:    MatPartitioningSetType - Sets the type of partitioner to use

482:    Collective on MatPartitioning

484:    Input Parameters:
485: +  part - the partitioning context
486: -  type - a known method

488:    Options Database Command:
489: $  -mat_partitioning_type  <type>
490: $      Use -help for a list of available methods
491: $      (for instance, parmetis)

493:    Level: intermediate

495: .keywords: partitioning, set, method, type

497: .seealso: MatPartitioningCreate(), MatPartitioningApply(), MatPartitioningType

499: @*/
500: PetscErrorCode  MatPartitioningSetType(MatPartitioning part,const MatPartitioningType type)
501: {
502:   PetscErrorCode ierr,(*r)(MatPartitioning);
503:   PetscTruth match;


509:   PetscTypeCompare((PetscObject)part,type,&match);
510:   if (match) return(0);

512:   if (part->setupcalled) {
513:     (*part->ops->destroy)(part);
514:     part->data        = 0;
515:     part->setupcalled = 0;
516:   }

518:   /* Get the function pointers for the method requested */
519:   if (!MatPartitioningRegisterAllCalled){ MatPartitioningRegisterAll(0);}
520:   PetscFListFind(part->comm,MatPartitioningList,type,(void (**)(void)) &r);

522:   if (!r) {SETERRQ1(PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown partitioning type %s",type);}

524:   part->ops->destroy      = (PetscErrorCode (*)(MatPartitioning)) 0;
525:   part->ops->view         = (PetscErrorCode (*)(MatPartitioning,PetscViewer)) 0;
526:   (*r)(part);

528:   PetscStrfree(part->type_name);
529:   PetscStrallocpy(type,&part->type_name);
530:   return(0);
531: }
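/*
   Usage sketch (error checking omitted): the partitioner can be chosen directly,

     MatPartitioningSetType(part,MAT_PARTITIONING_PARMETIS);

   or left to the options database,

     MatPartitioningSetFromOptions(part);

   combined with the run-time option -mat_partitioning_type parmetis.  Note that
   MAT_PARTITIONING_PARMETIS is only registered when PETSc was configured with
   ParMETIS, while MAT_PARTITIONING_CURRENT is always available.
*/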

535: /*@
536:    MatPartitioningSetFromOptions - Sets various partitioning options from the 
537:         options database.

539:    Collective on MatPartitioning

541:    Input Parameter:
542: .  part - the partitioning context.

544:    Options Database Command:
545: $  -mat_partitioning_type  <type>
546: $      Use -help for a list of available methods
547: $      (for instance, parmetis)

549:    Level: beginner

551: .keywords: partitioning, set, method, type
552: @*/
553: PetscErrorCode  MatPartitioningSetFromOptions(MatPartitioning part)
554: {
556:   PetscTruth flag;
557:   char       type[256];
558:   const char *def;

561:   if (!MatPartitioningRegisterAllCalled){ MatPartitioningRegisterAll(0);}
562:   PetscOptionsBegin(part->comm,part->prefix,"Partitioning options","MatOrderings");
563:     if (!part->type_name) {
564: #if defined(PETSC_HAVE_PARMETIS)
565:       def = MAT_PARTITIONING_PARMETIS;
566: #else
567:       def = MAT_PARTITIONING_CURRENT;
568: #endif
569:     } else {
570:       def = part->type_name;
571:     }
572:     PetscOptionsList("-mat_partitioning_type","Type of partitioner","MatPartitioningSetType",MatPartitioningList,def,type,256,&flag);
573:     if (flag) {
574:       MatPartitioningSetType(part,type);
575:     }
576:     /*
577:       Set the type if it was never set.
578:     */
579:     if (!part->type_name) {
580:       MatPartitioningSetType(part,def);
581:     }

583:     if (part->ops->setfromoptions) {
584:       (*part->ops->setfromoptions)(part);
585:     }
586:   PetscOptionsEnd();
587:   return(0);
588: }