Actual source code: da1.c
1: #define PETSCDM_DLL
2: /*
3: Code for manipulating distributed regular 1d arrays in parallel.
4: This file was created by Peter Mell 6/30/95
5: */
7: #include src/dm/da/daimpl.h
11: PetscErrorCode DAView_1d(DA da,PetscViewer viewer)
12: {
14: PetscMPIInt rank;
15: PetscTruth iascii,isdraw;
18: MPI_Comm_rank(da->comm,&rank);
20: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
21: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);
22: if (iascii) {
23: PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D m %D w %D s %D\n",rank,da->M,
24: da->m,da->w,da->s);
25: PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D\n",da->xs,da->xe);
26: PetscViewerFlush(viewer);
27: } else if (isdraw) {
28: PetscDraw draw;
29: double ymin = -1,ymax = 1,xmin = -1,xmax = da->M,x;
30: PetscInt base;
31: char node[10];
32: PetscTruth isnull;
34: PetscViewerDrawGetDraw(viewer,0,&draw);
35: PetscDrawIsNull(draw,&isnull); if (isnull) return(0);
37: PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);
38: PetscDrawSynchronizedClear(draw);
40: /* first processor draws all node lines */
41: if (!rank) {
42: PetscInt xmin_tmp;
43: ymin = 0.0; ymax = 0.3;
44:
45: /* ADIC doesn't like doubles in a for loop */
46: for (xmin_tmp =0; xmin_tmp < da->M; xmin_tmp++) {
47: PetscDrawLine(draw,(double)xmin_tmp,ymin,(double)xmin_tmp,ymax,PETSC_DRAW_BLACK);
48: }
50: xmin = 0.0; xmax = da->M - 1;
51: PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);
52: PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_BLACK);
53: }
55: PetscDrawSynchronizedFlush(draw);
56: PetscDrawPause(draw);
58: /* draw my box */
59: ymin = 0; ymax = 0.3; xmin = da->xs / da->w; xmax = (da->xe / da->w) - 1;
60: PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);
61: PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);
62: PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);
63: PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);
65: /* Put in index numbers */
66: base = da->base / da->w;
67: for (x=xmin; x<=xmax; x++) {
68: sprintf(node,"%d",(int)base++);
69: PetscDrawString(draw,x,ymin,PETSC_DRAW_RED,node);
70: }
72: PetscDrawSynchronizedFlush(draw);
73: PetscDrawPause(draw);
74: } else {
75: SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for DA 1d",((PetscObject)viewer)->type_name);
76: }
77: return(0);
78: }
80: EXTERN PetscErrorCode DAPublish_Petsc(PetscObject);
84: /*@C
85: DACreate1d - Creates an object that will manage the communication of one-dimensional
86: regular array data that is distributed across some processors.
88: Collective on MPI_Comm
90: Input Parameters:
91: + comm - MPI communicator
92: . wrap - type of periodicity should the array have, if any. Use
93: either DA_NONPERIODIC or DA_XPERIODIC
94: . M - global dimension of the array (use -M to indicate that it may be set to a different value
95: from the command line with -da_grid_x <M>)
96: . dof - number of degrees of freedom per node
97: . lc - array containing number of nodes in the X direction on each processor,
 98:          or PETSC_NULL. If non-null, must be of length m (one entry per process).
99: - s - stencil width
101: Output Parameter:
102: . inra - the resulting distributed array object
104: Options Database Key:
105: + -da_view - Calls DAView() at the conclusion of DACreate1d()
106: . -da_grid_x <nx> - number of grid points in x direction; can set if M < 0
107: - -da_refine_x - refinement factor
109: Level: beginner
111: Notes:
112: The array data itself is NOT stored in the DA, it is stored in Vec objects;
113: The appropriate vector objects can be obtained with calls to DACreateGlobalVector()
114: and DACreateLocalVector() and calls to VecDuplicate() if more are needed.
117: .keywords: distributed array, create, one-dimensional
119: .seealso: DADestroy(), DAView(), DACreate2d(), DACreate3d(), DAGlobalToLocalBegin(), DASetRefinementFactor(),
120: DAGlobalToLocalEnd(), DALocalToGlobal(), DALocalToLocalBegin(), DALocalToLocalEnd(), DAGetRefinementFactor(),
121: DAGetInfo(), DACreateGlobalVector(), DACreateLocalVector(), DACreateNaturalVector(), DALoad(), DAView()
123: @*/
124: PetscErrorCode DACreate1d(MPI_Comm comm,DAPeriodicType wrap,PetscInt M,PetscInt dof,PetscInt s,PetscInt *lc,DA *inra)
125: {
127: PetscMPIInt rank,size;
128: PetscInt i,*idx,nn,left,refine_x = 2,tM = M,xs,xe,x,Xs,Xe,start,end,m;
129: PetscTruth flg1,flg2;
130: DA da;
131: Vec local,global;
132: VecScatter ltog,gtol;
133: IS to,from;
137: *inra = 0;
138: #ifndef PETSC_USE_DYNAMIC_LIBRARIES
139: DMInitializePackage(PETSC_NULL);
140: #endif
142: if (dof < 1) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Must have 1 or more degrees of freedom per node: %D",dof);
143: if (s < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Stencil width cannot be negative: %D",s);
145: PetscOptionsBegin(comm,PETSC_NULL,"1d DA Options","DA");
146: if (M < 0) {
147: tM = -M;
148: PetscOptionsInt("-da_grid_x","Number of grid points in x direction","DACreate1d",tM,&tM,PETSC_NULL);
149: }
150: PetscOptionsInt("-da_refine_x","Refinement ratio in x direction","DASetRefinementFactor",refine_x,&refine_x,PETSC_NULL);
151: PetscOptionsEnd();
152: M = tM;
154: PetscHeaderCreate(da,_p_DA,struct _DAOps,DA_COOKIE,0,"DA",comm,DADestroy,DAView);
155: da->bops->publish = DAPublish_Petsc;
156: da->ops->createglobalvector = DACreateGlobalVector;
157: da->ops->getinterpolation = DAGetInterpolation;
158: da->ops->getcoloring = DAGetColoring;
159: da->ops->getmatrix = DAGetMatrix;
160: da->ops->refine = DARefine;
161: PetscLogObjectMemory(da,sizeof(struct _p_DA));
162: da->dim = 1;
163: da->interptype = DA_Q1;
164: da->refine_x = refine_x;
165: PetscMalloc(dof*sizeof(char*),&da->fieldname);
166: PetscMemzero(da->fieldname,dof*sizeof(char*));
167: MPI_Comm_size(comm,&size);
168: MPI_Comm_rank(comm,&rank);
170: m = size;
172: if (M < m) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"More processors than data points! %D %D",m,M);
173: if ((M-1) < s) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Array is too small for stencil! %D %D",M-1,s);
175: /*
176: Determine locally owned region
177: xs is the first local node number, x is the number of local nodes
178: */
179: if (!lc) {
180: PetscOptionsHasName(PETSC_NULL,"-da_partition_blockcomm",&flg1);
181: PetscOptionsHasName(PETSC_NULL,"-da_partition_nodes_at_end",&flg2);
182: if (flg1) { /* Block Comm type Distribution */
183: xs = rank*M/m;
184: x = (rank + 1)*M/m - xs;
185: } else if (flg2) { /* The odd nodes are evenly distributed across last nodes */
186: x = (M + rank)/m;
187: if (M/m == x) { xs = rank*x; }
188: else { xs = rank*(x-1) + (M+rank)%(x*m); }
189: } else { /* The odd nodes are evenly distributed across the first k nodes */
190: /* Regular PETSc Distribution */
191: x = M/m + ((M % m) > rank);
192: if (rank >= (M % m)) {xs = (rank * (PetscInt)(M/m) + M % m);}
193: else {xs = rank * (PetscInt)(M/m) + rank;}
194: }
195: } else {
196: x = lc[rank];
197: xs = 0;
198: for (i=0; i<rank; i++) {
199: xs += lc[i];
200: }
201: /* verify that data user provided is consistent */
202: left = xs;
203: for (i=rank; i<size; i++) {
204: left += lc[i];
205: }
206: if (left != M) {
207: SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Sum of lc across processors not equal to M %D %D",left,M);
208: }
209: }
211: /* From now on x,s,xs,xe,Xs,Xe are the exact location in the array */
212: x *= dof;
213: s *= dof; /* NOTE: here change s to be absolute stencil distance */
214: xs *= dof;
215: xe = xs + x;
217: /* determine ghost region */
218: if (wrap == DA_XPERIODIC) {
219: Xs = xs - s;
220: Xe = xe + s;
221: } else {
222: if ((xs-s) >= 0) Xs = xs-s; else Xs = 0;
223: if ((xe+s) <= M*dof) Xe = xe+s; else Xe = M*dof;
224: }
226: /* allocate the base parallel and sequential vectors */
227: da->Nlocal = x;
228: VecCreateMPIWithArray(comm,da->Nlocal,PETSC_DECIDE,0,&global);
229: VecSetBlockSize(global,dof);
230: da->nlocal = (Xe-Xs);
231: VecCreateSeqWithArray(PETSC_COMM_SELF,da->nlocal,0,&local);
232: VecSetBlockSize(local,dof);
233:
234: /* Create Local to Global Vector Scatter Context */
235: /* local to global inserts non-ghost point region into global */
236: VecGetOwnershipRange(global,&start,&end);
237: ISCreateStride(comm,x,start,1,&to);
238: ISCreateStride(comm,x,xs-Xs,1,&from);
239: VecScatterCreate(local,from,global,to,<og);
240: PetscLogObjectParent(da,to);
241: PetscLogObjectParent(da,from);
242: PetscLogObjectParent(da,ltog);
243: ISDestroy(from);
244: ISDestroy(to);
246: /* Create Global to Local Vector Scatter Context */
247: /* global to local must retrieve ghost points */
248: ISCreateStride(comm,(Xe-Xs),0,1,&to);
249:
250: PetscMalloc((x+2*s)*sizeof(PetscInt),&idx);
251: PetscLogObjectMemory(da,(x+2*s)*sizeof(PetscInt));
253: nn = 0;
254: if (wrap == DA_XPERIODIC) { /* Handle all cases with wrap first */
256: for (i=0; i<s; i++) { /* Left ghost points */
257: if ((xs-s+i)>=0) { idx[nn++] = xs-s+i;}
258: else { idx[nn++] = M*dof+(xs-s+i);}
259: }
261: for (i=0; i<x; i++) { idx [nn++] = xs + i;} /* Non-ghost points */
262:
263: for (i=0; i<s; i++) { /* Right ghost points */
264: if ((xe+i)<M*dof) { idx [nn++] = xe+i; }
265: else { idx [nn++] = (xe+i) - M*dof;}
266: }
267: } else { /* Now do all cases with no wrapping */
269: if (s <= xs) {for (i=0; i<s; i++) {idx[nn++] = xs - s + i;}}
270: else {for (i=0; i<xs; i++) {idx[nn++] = i;}}
272: for (i=0; i<x; i++) { idx [nn++] = xs + i;}
273:
274: if ((xe+s)<=M*dof) {for (i=0; i<s; i++) {idx[nn++]=xe+i;}}
275: else {for (i=xe; i<(M*dof); i++) {idx[nn++]=i; }}
276: }
278: ISCreateGeneral(comm,nn,idx,&from);
279: VecScatterCreate(global,from,local,to,>ol);
280: PetscLogObjectParent(da,to);
281: PetscLogObjectParent(da,from);
282: PetscLogObjectParent(da,gtol);
283: ISDestroy(to);
284: ISDestroy(from);
285: VecDestroy(local);
286: VecDestroy(global);
288: da->M = M; da->N = 1; da->m = m; da->n = 1;
289: da->xs = xs; da->xe = xe; da->ys = 0; da->ye = 1; da->zs = 0; da->ze = 1;
290: da->Xs = Xs; da->Xe = Xe; da->Ys = 0; da->Ye = 1; da->Zs = 0; da->Ze = 1;
291: da->P = 1; da->p = 1; da->w = dof; da->s = s/dof;
293: da->gtol = gtol;
294: da->ltog = ltog;
295: da->idx = idx;
296: da->Nl = nn;
297: da->base = xs;
298: da->ops->view = DAView_1d;
299: da->wrap = wrap;
300: da->stencil_type = DA_STENCIL_STAR;
302: /*
303: Set the local to global ordering in the global vector, this allows use
304: of VecSetValuesLocal().
305: */
306: ISLocalToGlobalMappingCreateNC(comm,nn,idx,&da->ltogmap);
307: ISLocalToGlobalMappingBlock(da->ltogmap,da->w,&da->ltogmapb);
308: PetscLogObjectParent(da,da->ltogmap);
310: da->ltol = PETSC_NULL;
311: da->ao = PETSC_NULL;
313: DAView_Private(da);
314: *inra = da;
315: PetscPublishAll(da);
316: return(0);
317: }
321: /*
322: Processes command line options to determine if/how a DA
323: is to be viewed. Called by DACreateXX()
324: */
325: PetscErrorCode DAView_Private(DA da)
326: {
328: PetscTruth flg1;
331: PetscOptionsBegin(da->comm,da->prefix,"Distributed array (DA) options","DA");
332: PetscOptionsTruth("-da_view","Print information about the DA's distribution","DAView",PETSC_FALSE,&flg1,PETSC_NULL);
333: if (flg1) {DAView(da,PETSC_VIEWER_STDOUT_(da->comm));}
334: PetscOptionsTruth("-da_view_draw","Draw how the DA is distributed","DAView",PETSC_FALSE,&flg1,PETSC_NULL);
335: if (flg1) {DAView(da,PETSC_VIEWER_DRAW_(da->comm));}
336: PetscOptionsEnd();
337: return(0);
338: }