Actual source code: pdvec.c
1: #define PETSCVEC_DLL
2: /*
3: Code for some of the parallel vector primitives.
4: */
5: #include "src/vec/vec/impls/mpi/pvecimpl.h"
6: #if defined(PETSC_HAVE_PNETCDF)
8: #include "pnetcdf.h"
10: #endif
14: PetscErrorCode VecDestroy_MPI(Vec v)
15: {
16: Vec_MPI *x = (Vec_MPI*)v->data;
20: #if defined(PETSC_USE_LOG)
21: PetscLogObjectState((PetscObject)v,"Length=%D",v->map.N);
22: #endif
23: PetscFree(x->array_allocated);
25: /* Destroy local representation of vector if it exists */
26: if (x->localrep) {
27: VecDestroy(x->localrep);
28: if (x->localupdate) {VecScatterDestroy(x->localupdate);}
29: }
30: /* Destroy the stashes: note the order - so that the tags are freed properly */
31: VecStashDestroy_Private(&v->bstash);
32: VecStashDestroy_Private(&v->stash);
33: PetscFree(x);
34: return(0);
35: }
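/* ---------------------------------------------------------------------
   Editor's usage sketch (not part of pdvec.c): the typical lifetime of a
   parallel vector whose cleanup ends in VecDestroy_MPI above.  Error
   checking (ierr/CHKERRQ) is omitted to match the abridged listing; the
   sizes passed to VecCreateMPI are arbitrary placeholders.
   --------------------------------------------------------------------- */
#include "petscvec.h"

static PetscErrorCode example_create_destroy(MPI_Comm comm)
{
  Vec x;
  VecCreateMPI(comm,PETSC_DECIDE,100,&x); /* allocates the Vec_MPI data freed above */
  /* ... use the vector ... */
  VecDestroy(x);                          /* dispatches to VecDestroy_MPI */
  return 0;
}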
39: PetscErrorCode VecView_MPI_ASCII(Vec xin,PetscViewer viewer)
40: {
41: PetscErrorCode ierr;
42: PetscInt i,work = xin->map.n,cnt,len;
43: PetscMPIInt j,n = 0,size,rank,tag = ((PetscObject)viewer)->tag;
44: MPI_Status status;
45: PetscScalar *values,*xarray;
46: const char *name;
47: PetscViewerFormat format;
50: VecGetArray(xin,&xarray);
51: /* determine the maximum message length that will arrive */
52: MPI_Comm_rank(xin->comm,&rank);
53: MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,xin->comm);
54: MPI_Comm_size(xin->comm,&size);
56: if (!rank) {
57: PetscMalloc((len+1)*sizeof(PetscScalar),&values);
58: PetscViewerGetFormat(viewer,&format);
59: /*
60: Matlab format and ASCII format are very similar except
61: Matlab uses %18.16e format while ASCII uses %g
62: */
63: if (format == PETSC_VIEWER_ASCII_MATLAB) {
64: PetscObjectGetName((PetscObject)xin,&name);
65: PetscViewerASCIIPrintf(viewer,"%s = [\n",name);
66: for (i=0; i<xin->map.n; i++) {
67: #if defined(PETSC_USE_COMPLEX)
68: if (PetscImaginaryPart(xarray[i]) > 0.0) {
69: PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
70: } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
71: PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",PetscRealPart(xarray[i]),-PetscImaginaryPart(xarray[i]));
72: } else {
73: PetscViewerASCIIPrintf(viewer,"%18.16e\n",PetscRealPart(xarray[i]));
74: }
75: #else
76: PetscViewerASCIIPrintf(viewer,"%18.16e\n",xarray[i]);
77: #endif
78: }
79: /* receive and print messages */
80: for (j=1; j<size; j++) {
81: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
82: MPI_Get_count(&status,MPIU_SCALAR,&n);
83: for (i=0; i<n; i++) {
84: #if defined(PETSC_USE_COMPLEX)
85: if (PetscImaginaryPart(values[i]) > 0.0) {
86: PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16e i\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
87: } else if (PetscImaginaryPart(values[i]) < 0.0) {
88: PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16e i\n",PetscRealPart(values[i]),-PetscImaginaryPart(values[i]));
89: } else {
90: PetscViewerASCIIPrintf(viewer,"%18.16e\n",PetscRealPart(values[i]));
91: }
92: #else
93: PetscViewerASCIIPrintf(viewer,"%18.16e\n",values[i]);
94: #endif
95: }
96: }
97: PetscViewerASCIIPrintf(viewer,"];\n");
99: } else if (format == PETSC_VIEWER_ASCII_SYMMODU) {
100: for (i=0; i<xin->map.n; i++) {
101: #if defined(PETSC_USE_COMPLEX)
102: PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
103: #else
104: PetscViewerASCIIPrintf(viewer,"%18.16e\n",xarray[i]);
105: #endif
106: }
107: /* receive and print messages */
108: for (j=1; j<size; j++) {
109: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
110: MPI_Get_count(&status,MPIU_SCALAR,&n);
111: for (i=0; i<n; i++) {
112: #if defined(PETSC_USE_COMPLEX)
113: PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
114: #else
115: PetscViewerASCIIPrintf(viewer,"%18.16e\n",values[i]);
116: #endif
117: }
118: }
119: } else if (format == PETSC_VIEWER_ASCII_VTK || format == PETSC_VIEWER_ASCII_VTK_CELL) {
120: /*
121: state 0: No header has been output
122: state 1: Only POINT_DATA has been output
123: state 2: Only CELL_DATA has been output
124: state 3: Output both, POINT_DATA last
125: state 4: Output both, CELL_DATA last
126: */
127: static PetscInt stateId = -1;
128: int outputState = 0;
129: PetscTruth hasState;
130: int doOutput = 0;
131: PetscInt bs, b;
133: if (stateId < 0) {
134: PetscObjectComposedDataRegister(&stateId);
135: }
136: PetscObjectComposedDataGetInt((PetscObject) viewer, stateId, outputState, hasState);
137: if (!hasState) {
138: outputState = 0;
139: }
140: PetscObjectGetName((PetscObject) xin, &name);
141: VecGetLocalSize(xin, &n);
142: VecGetBlockSize(xin, &bs);
143: if ((bs < 1) || (bs > 3)) {
144: SETERRQ1(PETSC_ERR_ARG_WRONGSTATE, "VTK can only handle 3D objects, but vector dimension is %d", bs);
145: }
146: if (format == PETSC_VIEWER_ASCII_VTK) {
147: if (outputState == 0) {
148: outputState = 1;
149: doOutput = 1;
150: } else if (outputState == 1) {
151: doOutput = 0;
152: } else if (outputState == 2) {
153: outputState = 3;
154: doOutput = 1;
155: } else if (outputState == 3) {
156: doOutput = 0;
157: } else if (outputState == 4) {
158: SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "Tried to output POINT_DATA again after intervening CELL_DATA");
159: }
160: if (doOutput) {
161: PetscViewerASCIIPrintf(viewer, "POINT_DATA %d\n", xin->map.N);
162: }
163: } else {
164: if (outputState == 0) {
165: outputState = 2;
166: doOutput = 1;
167: } else if (outputState == 1) {
168: outputState = 4;
169: doOutput = 1;
170: } else if (outputState == 2) {
171: doOutput = 0;
172: } else if (outputState == 3) {
173: SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "Tried to output CELL_DATA again after intervening POINT_DATA");
174: } else if (outputState == 4) {
175: doOutput = 0;
176: }
177: if (doOutput) {
178: PetscViewerASCIIPrintf(viewer, "CELL_DATA %d\n", xin->map.N);
179: }
180: }
181: PetscObjectComposedDataSetInt((PetscObject) viewer, stateId, outputState);
182: if (name) {
183: PetscViewerASCIIPrintf(viewer, "SCALARS %s double %d\n", name, bs);
184: } else {
185: PetscViewerASCIIPrintf(viewer, "SCALARS scalars double %d\n", bs);
186: }
187: PetscViewerASCIIPrintf(viewer, "LOOKUP_TABLE default\n");
188: for (i=0; i<n/bs; i++) {
189: for (b=0; b<bs; b++) {
190: if (b > 0) {
191: PetscViewerASCIIPrintf(viewer," ");
192: }
193: #if !defined(PETSC_USE_COMPLEX)
194: PetscViewerASCIIPrintf(viewer,"%G",xarray[i*bs+b]);
195: #endif
196: }
197: PetscViewerASCIIPrintf(viewer,"\n");
198: }
199: for (j=1; j<size; j++) {
200: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
201: MPI_Get_count(&status,MPIU_SCALAR,&n);
202: for (i=0; i<n/bs; i++) {
203: for (b=0; b<bs; b++) {
204: if (b > 0) {
205: PetscViewerASCIIPrintf(viewer," ");
206: }
207: #if !defined(PETSC_USE_COMPLEX)
208: PetscViewerASCIIPrintf(viewer,"%G",values[i*bs+b]);
209: #endif
210: }
211: PetscViewerASCIIPrintf(viewer,"\n");
212: }
213: }
214: } else if (format == PETSC_VIEWER_ASCII_VTK_COORDS) {
215: PetscInt bs, b;
217: VecGetLocalSize(xin, &n);
218: VecGetBlockSize(xin, &bs);
219: if ((bs < 1) || (bs > 3)) {
220: SETERRQ1(PETSC_ERR_ARG_WRONGSTATE, "VTK can only handle 3D objects, but vector dimension is %d", bs);
221: }
222: for (i=0; i<n/bs; i++) {
223: for (b=0; b<bs; b++) {
224: if (b > 0) {
225: PetscViewerASCIIPrintf(viewer," ");
226: }
227: #if !defined(PETSC_USE_COMPLEX)
228: PetscViewerASCIIPrintf(viewer,"%G",xarray[i*bs+b]);
229: #endif
230: }
231: for (b=bs; b<3; b++) {
232: PetscViewerASCIIPrintf(viewer," 0.0");
233: }
234: PetscViewerASCIIPrintf(viewer,"\n");
235: }
236: for (j=1; j<size; j++) {
237: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
238: MPI_Get_count(&status,MPIU_SCALAR,&n);
239: for (i=0; i<n/bs; i++) {
240: for (b=0; b<bs; b++) {
241: if (b > 0) {
242: PetscViewerASCIIPrintf(viewer," ");
243: }
244: #if !defined(PETSC_USE_COMPLEX)
245: PetscViewerASCIIPrintf(viewer,"%G",values[i*bs+b]);
246: #endif
247: }
248: for (b=bs; b<3; b++) {
249: PetscViewerASCIIPrintf(viewer," 0.0");
250: }
251: PetscViewerASCIIPrintf(viewer,"\n");
252: }
253: }
254: } else if (format == PETSC_VIEWER_ASCII_PCICE) {
255: PetscInt bs, b, vertexCount = 1;
257: VecGetLocalSize(xin, &n);
258: VecGetBlockSize(xin, &bs);
259: if ((bs < 1) || (bs > 3)) {
260: SETERRQ1(PETSC_ERR_ARG_WRONGSTATE, "PCICE can only handle up to 3D objects, but vector dimension is %d", bs);
261: }
262: PetscViewerASCIIPrintf(viewer,"%D\n", xin->map.N/bs);
263: for (i=0; i<n/bs; i++) {
264: PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
265: for (b=0; b<bs; b++) {
266: if (b > 0) {
267: PetscViewerASCIIPrintf(viewer," ");
268: }
269: #if !defined(PETSC_USE_COMPLEX)
270: PetscViewerASCIIPrintf(viewer,"% 12.5E",xarray[i*bs+b]);
271: #endif
272: }
273: PetscViewerASCIIPrintf(viewer,"\n");
274: }
275: for (j=1; j<size; j++) {
276: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
277: MPI_Get_count(&status,MPIU_SCALAR,&n);
278: for (i=0; i<n/bs; i++) {
279: PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
280: for (b=0; b<bs; b++) {
281: if (b > 0) {
282: PetscViewerASCIIPrintf(viewer," ");
283: }
284: #if !defined(PETSC_USE_COMPLEX)
285: PetscViewerASCIIPrintf(viewer,"% 12.5E",values[i*bs+b]);
286: #endif
287: }
288: PetscViewerASCIIPrintf(viewer,"\n");
289: }
290: }
291: } else if (format == PETSC_VIEWER_ASCII_PYLITH) {
292: PetscInt bs, b, vertexCount = 1;
294: VecGetLocalSize(xin, &n);
295: VecGetBlockSize(xin, &bs);
296: if (bs != 3) {
297: SETERRQ1(PETSC_ERR_ARG_WRONGSTATE, "PyLith can only handle 3D objects, but vector dimension is %d", bs);
298: }
299: PetscViewerASCIIPrintf(viewer,"#\n");
300: PetscViewerASCIIPrintf(viewer,"# Node X-coord Y-coord Z-coord\n");
301: PetscViewerASCIIPrintf(viewer,"#\n");
302: for (i=0; i<n/bs; i++) {
303: PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
304: for (b=0; b<bs; b++) {
305: if (b > 0) {
306: PetscViewerASCIIPrintf(viewer," ");
307: }
308: #if !defined(PETSC_USE_COMPLEX)
309: PetscViewerASCIIPrintf(viewer,"% 16.8E",xarray[i*bs+b]);
310: #endif
311: }
312: PetscViewerASCIIPrintf(viewer,"\n");
313: }
314: for (j=1; j<size; j++) {
315: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
316: MPI_Get_count(&status,MPIU_SCALAR,&n);
317: for (i=0; i<n/bs; i++) {
318: PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
319: for (b=0; b<bs; b++) {
320: if (b > 0) {
321: PetscViewerASCIIPrintf(viewer," ");
322: }
323: #if !defined(PETSC_USE_COMPLEX)
324: PetscViewerASCIIPrintf(viewer,"% 16.8E",values[i*bs+b]);
325: #endif
326: }
327: PetscViewerASCIIPrintf(viewer,"\n");
328: }
329: }
330: } else {
331: if (format != PETSC_VIEWER_ASCII_COMMON) {PetscViewerASCIIPrintf(viewer,"Process [%d]\n",rank);}
332: cnt = 0;
333: for (i=0; i<xin->map.n; i++) {
334: if (format == PETSC_VIEWER_ASCII_INDEX) {
335: PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
336: }
337: #if defined(PETSC_USE_COMPLEX)
338: if (PetscImaginaryPart(xarray[i]) > 0.0) {
339: PetscViewerASCIIPrintf(viewer,"%G + %G i\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
340: } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
341: PetscViewerASCIIPrintf(viewer,"%G - %G i\n",PetscRealPart(xarray[i]),-PetscImaginaryPart(xarray[i]));
342: } else {
343: PetscViewerASCIIPrintf(viewer,"%G\n",PetscRealPart(xarray[i]));
344: }
345: #else
346: PetscViewerASCIIPrintf(viewer,"%G\n",xarray[i]);
347: #endif
348: }
349: /* receive and print messages */
350: for (j=1; j<size; j++) {
351: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
352: MPI_Get_count(&status,MPIU_SCALAR,&n);
353: if (format != PETSC_VIEWER_ASCII_COMMON) {
354: PetscViewerASCIIPrintf(viewer,"Process [%d]\n",j);
355: }
356: for (i=0; i<n; i++) {
357: if (format == PETSC_VIEWER_ASCII_INDEX) {
358: PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
359: }
360: #if defined(PETSC_USE_COMPLEX)
361: if (PetscImaginaryPart(values[i]) > 0.0) {
362: PetscViewerASCIIPrintf(viewer,"%G + %G i\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
363: } else if (PetscImaginaryPart(values[i]) < 0.0) {
364: PetscViewerASCIIPrintf(viewer,"%G - %G i\n",PetscRealPart(values[i]),-PetscImaginaryPart(values[i]));
365: } else {
366: PetscViewerASCIIPrintf(viewer,"%G\n",PetscRealPart(values[i]));
367: }
368: #else
369: PetscViewerASCIIPrintf(viewer,"%G\n",values[i]);
370: #endif
371: }
372: }
373: }
374: PetscFree(values);
375: } else {
376: /* send values */
377: MPI_Send(xarray,xin->map.n,MPIU_SCALAR,0,tag,xin->comm);
378: }
379: PetscViewerFlush(viewer);
380: VecRestoreArray(xin,&xarray);
381: return(0);
382: }
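/* ---------------------------------------------------------------------
   Editor's usage sketch (not part of pdvec.c): driving the MATLAB-style
   branch of VecView_MPI_ASCII above.  Only rank 0 receives the remote
   pieces and prints; the object name "x" is an arbitrary choice and
   PetscViewerSetFormat is assumed to be the format call of this PETSc
   generation.
   --------------------------------------------------------------------- */
#include "petscvec.h"

static PetscErrorCode example_view_ascii_matlab(Vec x)
{
  PetscObjectSetName((PetscObject)x,"x");  /* printed as "x = [ ... ];" */
  PetscViewerSetFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_MATLAB);
  VecView(x,PETSC_VIEWER_STDOUT_WORLD);
  return 0;
}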
386: PetscErrorCode VecView_MPI_Binary(Vec xin,PetscViewer viewer)
387: {
389: PetscMPIInt rank,size,tag = ((PetscObject)viewer)->tag,n;
390: PetscInt len,work = xin->map.n,j;
391: int fdes;
392: MPI_Status status;
393: PetscScalar *values,*xarray;
394: FILE *file;
397: VecGetArray(xin,&xarray);
398: PetscViewerBinaryGetDescriptor(viewer,&fdes);
400: /* determine the maximum message length that will arrive */
401: MPI_Comm_rank(xin->comm,&rank);
402: MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,xin->comm);
403: MPI_Comm_size(xin->comm,&size);
405: if (!rank) {
406: PetscInt cookie = VEC_FILE_COOKIE;
407: PetscBinaryWrite(fdes,&cookie,1,PETSC_INT,PETSC_FALSE);
408: PetscBinaryWrite(fdes,&xin->map.N,1,PETSC_INT,PETSC_FALSE);
409: PetscBinaryWrite(fdes,xarray,xin->map.n,PETSC_SCALAR,PETSC_FALSE);
411: PetscMalloc((len+1)*sizeof(PetscScalar),&values);
412: /* receive and write messages */
413: for (j=1; j<size; j++) {
414: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
415: MPI_Get_count(&status,MPIU_SCALAR,&n);
416: PetscBinaryWrite(fdes,values,n,PETSC_SCALAR,PETSC_FALSE);
417: }
418: PetscFree(values);
419: PetscViewerBinaryGetInfoPointer(viewer,&file);
420: if (file && xin->map.bs > 1) {
421: if (xin->prefix) {
422: PetscFPrintf(PETSC_COMM_SELF,file,"-%svecload_block_size %D\n",xin->prefix,xin->map.bs);
423: } else {
424: PetscFPrintf(PETSC_COMM_SELF,file,"-vecload_block_size %D\n",xin->map.bs);
425: }
426: }
427: } else {
428: /* send values */
429: MPI_Send(xarray,xin->map.n,MPIU_SCALAR,0,tag,xin->comm);
430: }
431: VecRestoreArray(xin,&xarray);
432: return(0);
433: }
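/* ---------------------------------------------------------------------
   Editor's usage sketch (not part of pdvec.c): writing a vector through
   VecView_MPI_Binary above.  The file name is a placeholder and the
   FILE_MODE_WRITE constant is assumed for this PETSc generation (the
   mode enum has been renamed across releases); error checks omitted.
   --------------------------------------------------------------------- */
#include "petscvec.h"

static PetscErrorCode example_view_binary(Vec x,MPI_Comm comm)
{
  PetscViewer viewer;
  PetscViewerBinaryOpen(comm,"vector.dat",FILE_MODE_WRITE,&viewer);
  VecView(x,viewer);            /* rank 0 gathers and writes; other ranks send */
  PetscViewerDestroy(viewer);
  return 0;
}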
437: PetscErrorCode VecView_MPI_Draw_LG(Vec xin,PetscViewer viewer)
438: {
439: PetscDraw draw;
440: PetscTruth isnull;
443: #if defined(PETSC_USE_64BIT_INDICES)
445: PetscViewerDrawGetDraw(viewer,0,&draw);
446: PetscDrawIsNull(draw,&isnull);
447: if (isnull) return(0);
448: SETERRQ(PETSC_ERR_SUP,"Not supported with 64 bit integers");
449: #else
450: PetscMPIInt size,rank;
451: int i,N = xin->map.N,*lens;
452: PetscReal *xx,*yy;
453: PetscDrawLG lg;
454: PetscScalar *xarray;
457: PetscViewerDrawGetDraw(viewer,0,&draw);
458: PetscDrawIsNull(draw,&isnull);
459: if (isnull) return(0);
461: VecGetArray(xin,&xarray);
462: PetscViewerDrawGetDrawLG(viewer,0,&lg);
463: PetscDrawCheckResizedWindow(draw);
464: MPI_Comm_rank(xin->comm,&rank);
465: MPI_Comm_size(xin->comm,&size);
466: if (!rank) {
467: PetscDrawLGReset(lg);
468: PetscMalloc(2*(N+1)*sizeof(PetscReal),&xx);
469: for (i=0; i<N; i++) {xx[i] = (PetscReal) i;}
470: yy = xx + N;
471: PetscMalloc(size*sizeof(PetscInt),&lens);
472: for (i=0; i<size; i++) {
473: lens[i] = xin->map.range[i+1] - xin->map.range[i];
474: }
475: #if !defined(PETSC_USE_COMPLEX)
476: MPI_Gatherv(xarray,xin->map.n,MPIU_REAL,yy,lens,xin->map.range,MPIU_REAL,0,xin->comm);
477: #else
478: {
479: PetscReal *xr;
480: PetscMalloc((xin->map.n+1)*sizeof(PetscReal),&xr);
481: for (i=0; i<xin->map.n; i++) {
482: xr[i] = PetscRealPart(xarray[i]);
483: }
484: MPI_Gatherv(xr,xin->map.n,MPIU_REAL,yy,lens,xin->map.range,MPIU_REAL,0,xin->comm);
485: PetscFree(xr);
486: }
487: #endif
488: PetscFree(lens);
489: PetscDrawLGAddPoints(lg,N,&xx,&yy);
490: PetscFree(xx);
491: } else {
492: #if !defined(PETSC_USE_COMPLEX)
493: MPI_Gatherv(xarray,xin->map.n,MPIU_REAL,0,0,0,MPIU_REAL,0,xin->comm);
494: #else
495: {
496: PetscReal *xr;
497: PetscMalloc((xin->map.n+1)*sizeof(PetscReal),&xr);
498: for (i=0; i<xin->map.n; i++) {
499: xr[i] = PetscRealPart(xarray[i]);
500: }
501: MPI_Gatherv(xr,xin->map.n,MPIU_REAL,0,0,0,MPIU_REAL,0,xin->comm);
502: PetscFree(xr);
503: }
504: #endif
505: }
506: PetscDrawLGDraw(lg);
507: PetscDrawSynchronizedFlush(draw);
508: VecRestoreArray(xin,&xarray);
509: #endif
510: return(0);
511: }
514: /* I am assuming this is extern "C" because it is dynamically loaded. If not, we can remove the DLLEXPORT tag */
517: PetscErrorCode VecView_MPI_Draw(Vec xin,PetscViewer viewer)
518: {
520: PetscMPIInt rank,size,tag = ((PetscObject)viewer)->tag;
521: PetscInt i,start,end;
522: MPI_Status status;
523: PetscReal coors[4],ymin,ymax,xmin,xmax,tmp;
524: PetscDraw draw;
525: PetscTruth isnull;
526: PetscDrawAxis axis;
527: PetscScalar *xarray;
530: PetscViewerDrawGetDraw(viewer,0,&draw);
531: PetscDrawIsNull(draw,&isnull); if (isnull) return(0);
533: VecGetArray(xin,&xarray);
534: PetscDrawCheckResizedWindow(draw);
535: xmin = 1.e20; xmax = -1.e20;
536: for (i=0; i<xin->map.n; i++) {
537: #if defined(PETSC_USE_COMPLEX)
538: if (PetscRealPart(xarray[i]) < xmin) xmin = PetscRealPart(xarray[i]);
539: if (PetscRealPart(xarray[i]) > xmax) xmax = PetscRealPart(xarray[i]);
540: #else
541: if (xarray[i] < xmin) xmin = xarray[i];
542: if (xarray[i] > xmax) xmax = xarray[i];
543: #endif
544: }
545: if (xmin + 1.e-10 > xmax) {
546: xmin -= 1.e-5;
547: xmax += 1.e-5;
548: }
549: MPI_Reduce(&xmin,&ymin,1,MPIU_REAL,MPI_MIN,0,xin->comm);
550: MPI_Reduce(&xmax,&ymax,1,MPIU_REAL,MPI_MAX,0,xin->comm);
551: MPI_Comm_size(xin->comm,&size);
552: MPI_Comm_rank(xin->comm,&rank);
553: PetscDrawAxisCreate(draw,&axis);
554: PetscLogObjectParent(draw,axis);
555: if (!rank) {
556: PetscDrawClear(draw);
557: PetscDrawFlush(draw);
558: PetscDrawAxisSetLimits(axis,0.0,(double)xin->map.N,ymin,ymax);
559: PetscDrawAxisDraw(axis);
560: PetscDrawGetCoordinates(draw,coors,coors+1,coors+2,coors+3);
561: }
562: PetscDrawAxisDestroy(axis);
563: MPI_Bcast(coors,4,MPIU_REAL,0,xin->comm);
564: if (rank) {PetscDrawSetCoordinates(draw,coors[0],coors[1],coors[2],coors[3]);}
565: /* draw local part of vector */
566: VecGetOwnershipRange(xin,&start,&end);
567: if (rank < size-1) { /* send value to right */
568: MPI_Send(&xarray[xin->map.n-1],1,MPIU_REAL,rank+1,tag,xin->comm);
569: }
570: for (i=1; i<xin->map.n; i++) {
571: #if !defined(PETSC_USE_COMPLEX)
572: PetscDrawLine(draw,(PetscReal)(i-1+start),xarray[i-1],(PetscReal)(i+start),
573: xarray[i],PETSC_DRAW_RED);
574: #else
575: PetscDrawLine(draw,(PetscReal)(i-1+start),PetscRealPart(xarray[i-1]),(PetscReal)(i+start),
576: PetscRealPart(xarray[i]),PETSC_DRAW_RED);
577: #endif
578: }
579: if (rank) { /* receive value from left */
580: MPI_Recv(&tmp,1,MPIU_REAL,rank-1,tag,xin->comm,&status);
581: #if !defined(PETSC_USE_COMPLEX)
582: PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,xarray[0],PETSC_DRAW_RED);
583: #else
584: PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,PetscRealPart(xarray[0]),PETSC_DRAW_RED);
585: #endif
586: }
587: PetscDrawSynchronizedFlush(draw);
588: PetscDrawPause(draw);
589: VecRestoreArray(xin,&xarray);
590: return(0);
591: }
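/* ---------------------------------------------------------------------
   Editor's usage sketch (not part of pdvec.c): the two draw paths above.
   With the default format VecView_MPI_Draw connects neighbouring entries
   with line segments; with the PETSC_VIEWER_DRAW_LG format the line-graph
   variant is used instead (see the dispatch in VecView_MPI below).
   --------------------------------------------------------------------- */
#include "petscvec.h"

static PetscErrorCode example_view_draw(Vec x)
{
  VecView(x,PETSC_VIEWER_DRAW_WORLD);                                 /* VecView_MPI_Draw    */
  PetscViewerSetFormat(PETSC_VIEWER_DRAW_WORLD,PETSC_VIEWER_DRAW_LG);
  VecView(x,PETSC_VIEWER_DRAW_WORLD);                                 /* VecView_MPI_Draw_LG */
  return 0;
}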
594: #if defined(PETSC_USE_SOCKET_VIEWER)
597: PetscErrorCode VecView_MPI_Socket(Vec xin,PetscViewer viewer)
598: {
599: #if defined(PETSC_USE_64BIT_INDICES)
601: SETERRQ(PETSC_ERR_SUP,"Not supported with 64 bit integers");
602: #else
604: PetscMPIInt rank,size;
605: int i,N = xin->map.N,*lens;
606: PetscScalar *xx,*xarray;
609: VecGetArray(xin,&xarray);
610: MPI_Comm_rank(xin->comm,&rank);
611: MPI_Comm_size(xin->comm,&size);
612: if (!rank) {
613: PetscMalloc((N+1)*sizeof(PetscScalar),&xx);
614: PetscMalloc(size*sizeof(PetscInt),&lens);
615: for (i=0; i<size; i++) {
616: lens[i] = xin->map.range[i+1] - xin->map.range[i];
617: }
618: MPI_Gatherv(xarray,xin->map.n,MPIU_SCALAR,xx,lens,xin->map.range,MPIU_SCALAR,0,xin->comm);
619: PetscFree(lens);
620: PetscViewerSocketPutScalar(viewer,N,1,xx);
621: PetscFree(xx);
622: } else {
623: MPI_Gatherv(xarray,xin->map.n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,xin->comm);
624: }
625: VecRestoreArray(xin,&xarray);
626: #endif
627: return(0);
628: }
629: #endif
631: #if defined(PETSC_HAVE_MATLAB)
634: PetscErrorCode VecView_MPI_Matlab(Vec xin,PetscViewer viewer)
635: {
637: PetscMPIInt rank,size,*lens;
638: PetscInt i,N = xin->map.N;
639: PetscScalar *xx,*xarray;
642: VecGetArray(xin,&xarray);
643: MPI_Comm_rank(xin->comm,&rank);
644: MPI_Comm_size(xin->comm,&size);
645: if (!rank) {
646: PetscMalloc((N+1)*sizeof(PetscScalar),&xx);
647: PetscMalloc(size*sizeof(PetscMPIInt),&lens);
648: for (i=0; i<size; i++) {
649: lens[i] = xin->map.range[i+1] - xin->map.range[i];
650: }
651: MPI_Gatherv(xarray,xin->map.n,MPIU_SCALAR,xx,lens,xin->map.range,MPIU_SCALAR,0,xin->comm);
652: PetscFree(lens);
654: PetscObjectName((PetscObject)xin);
655: PetscViewerMatlabPutArray(viewer,N,1,xx,xin->name);
657: PetscFree(xx);
658: } else {
659: MPI_Gatherv(xarray,xin->map.n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,xin->comm);
660: }
661: VecRestoreArray(xin,&xarray);
662: return(0);
663: }
664: #endif
666: #if defined(PETSC_HAVE_PNETCDF)
669: PetscErrorCode VecView_MPI_Netcdf(Vec xin,PetscViewer v)
670: {
672: int n = xin->map.n,ncid,xdim,xdim_num=1,xin_id,xstart;
673: PetscScalar *xarray;
676: VecGetArray(xin,&xarray);
677: PetscViewerNetcdfGetID(v,&ncid);
678: if (ncid < 0) SETERRQ(PETSC_ERR_ORDER,"First call PetscViewerNetcdfOpen to create NetCDF dataset");
679: /* define dimensions */
680: ncmpi_def_dim(ncid,"PETSc_Vector_Global_Size",xin->map.N,&xdim);
681: /* define variables */
682: ncmpi_def_var(ncid,"PETSc_Vector_MPI",NC_DOUBLE,xdim_num,&xdim,&xin_id);
683: /* leave define mode */
684: ncmpi_enddef(ncid);
685: /* store the vector */
686: VecGetOwnershipRange(xin,&xstart,PETSC_NULL);
687: ncmpi_put_vara_double_all(ncid,xin_id,(const MPI_Offset*)&xstart,(const MPI_Offset*)&n,xarray);
688: VecRestoreArray(xin,&xarray);
689: return(0);
690: }
691: #endif
693: #if defined(PETSC_HAVE_HDF4)
696: PetscErrorCode VecView_MPI_HDF4_Ex(Vec X, PetscViewer viewer, int d, int *dims)
697: {
699: PetscMPIInt rank,size,tag = ((PetscObject)viewer)->tag;
700: int len, i, j, k, cur, bs, n, N;
701: MPI_Status status;
702: PetscScalar *x;
703: float *xlf, *xf;
707: bs = X->map.bs > 0 ? X->map.bs : 1;
708: N = X->map.N / bs;
709: n = X->map.n / bs;
711: /* For now, always convert to float */
712: PetscMalloc(N * sizeof(float), &xf);
713: PetscMalloc(n * sizeof(float), &xlf);
715: MPI_Comm_rank(X->comm, &rank);
716: MPI_Comm_size(X->comm, &size);
718: VecGetArray(X, &x);
720: for (k = 0; k < bs; k++) {
721: for (i = 0; i < n; i++) {
722: xlf[i] = (float) x[i*bs + k];
723: }
724: if (!rank) {
725: cur = 0;
726: PetscMemcpy(xf + cur, xlf, n * sizeof(float));
727: cur += n;
728: for (j = 1; j < size; j++) {
729: MPI_Recv(xf + cur, N - cur, MPI_FLOAT, j, tag, X->comm,&status);
730: MPI_Get_count(&status, MPI_FLOAT, &len);cur += len;
731: }
732: if (cur != N) {
733: SETERRQ2(PETSC_ERR_PLIB, "? %D %D", cur, N);
734: }
735: PetscViewerHDF4WriteSDS(viewer, xf, 2, dims, bs);
736: } else {
737: MPI_Send(xlf, n, MPI_FLOAT, 0, tag, X->comm);
738: }
739: }
740: VecRestoreArray(X, &x);
741: PetscFree(xlf);
742: PetscFree(xf);
743: return(0);
744: }
745: #endif
747: #if defined(PETSC_HAVE_HDF4)
750: PetscErrorCode VecView_MPI_HDF4(Vec xin,PetscViewer viewer)
751: {
753: int bs, dims[1];
755: bs = xin->map.bs > 0 ? xin->map.bs : 1;
756: dims[0] = xin->map.N / bs;
757: VecView_MPI_HDF4_Ex(xin, viewer, 1, dims);
758: return(0);
759: }
760: #endif
764: PetscErrorCode VecView_MPI(Vec xin,PetscViewer viewer)
765: {
767: PetscTruth iascii,issocket,isbinary,isdraw;
768: #if defined(PETSC_HAVE_MATHEMATICA)
769: PetscTruth ismathematica;
770: #endif
771: #if defined(PETSC_HAVE_PNETCDF)
772: PetscTruth isnetcdf;
773: #endif
774: #if defined(PETSC_HAVE_HDF4)
775: PetscTruth ishdf4;
776: #endif
777: #if defined(PETSC_HAVE_MATLAB) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE) && !defined(PETSC_USE_MAT_SINGLE)
778: PetscTruth ismatlab;
779: #endif
782: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
783: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_SOCKET,&issocket);
784: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
785: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);
786: #if defined(PETSC_HAVE_MATHEMATICA)
787: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_MATHEMATICA,&ismathematica);
788: #endif
789: #if defined(PETSC_HAVE_PNETCDF)
790: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_NETCDF,&isnetcdf);
791: #endif
792: #if defined(PETSC_HAVE_HDF4)
793: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_HDF4,&ishdf4);
794: #endif
795: #if defined(PETSC_HAVE_MATLAB) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE) && !defined(PETSC_USE_MAT_SINGLE)
796: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_MATLAB,&ismatlab);
797: #endif
798: if (iascii){
799: VecView_MPI_ASCII(xin,viewer);
800: #if defined(PETSC_USE_SOCKET_VIEWER)
801: } else if (issocket) {
802: VecView_MPI_Socket(xin,viewer);
803: #endif
804: } else if (isbinary) {
805: VecView_MPI_Binary(xin,viewer);
806: } else if (isdraw) {
807: PetscViewerFormat format;
809: PetscViewerGetFormat(viewer,&format);
810: if (format == PETSC_VIEWER_DRAW_LG) {
811: VecView_MPI_Draw_LG(xin,viewer);
812: } else {
813: VecView_MPI_Draw(xin,viewer);
814: }
815: #if defined(PETSC_HAVE_MATHEMATICA)
816: } else if (ismathematica) {
817: PetscViewerMathematicaPutVector(viewer,xin);
818: #endif
819: #if defined(PETSC_HAVE_PNETCDF)
820: } else if (isnetcdf) {
821: VecView_MPI_Netcdf(xin,viewer);
822: #endif
823: #if defined(PETSC_HAVE_HDF4)
824: } else if (ishdf4) {
825: VecView_MPI_HDF4(xin,viewer);
826: #endif
827: #if defined(PETSC_HAVE_MATLAB) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE) && !defined(PETSC_USE_MAT_SINGLE)
828: } else if (ismatlab) {
829: VecView_MPI_Matlab(xin,viewer);
830: #endif
831: } else {
832: SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for this object",((PetscObject)viewer)->type_name);
833: }
834: return(0);
835: }
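/* ---------------------------------------------------------------------
   Editor's usage sketch (not part of pdvec.c): the dispatch in VecView_MPI
   above is driven entirely by the viewer's type and format, so user code
   simply calls VecView with whichever viewer it holds.  The file name
   "x.m" is a placeholder; error checks omitted.
   --------------------------------------------------------------------- */
#include "petscvec.h"

static PetscErrorCode example_view_dispatch(Vec x,MPI_Comm comm)
{
  PetscViewer ascii;
  PetscViewerASCIIOpen(comm,"x.m",&ascii);   /* ASCII viewer -> VecView_MPI_ASCII */
  VecView(x,ascii);
  PetscViewerDestroy(ascii);
  VecView(x,PETSC_VIEWER_DRAW_WORLD);        /* draw viewer  -> VecView_MPI_Draw  */
  return 0;
}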
839: PetscErrorCode VecGetSize_MPI(Vec xin,PetscInt *N)
840: {
842: *N = xin->map.N;
843: return(0);
844: }
848: PetscErrorCode VecGetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],PetscScalar y[])
849: {
850: Vec_MPI *x = (Vec_MPI *)xin->data;
851: PetscScalar *xx = x->array;
852: PetscInt i,tmp,start = xin->map.range[xin->stash.rank];
855: for (i=0; i<ni; i++) {
856: tmp = ix[i] - start;
857: #if defined(PETSC_USE_DEBUG)
858: if (tmp < 0 || tmp >= xin->map.n) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Can only get local values, trying %D",ix[i]);
859: #endif
860: y[i] = xx[tmp];
861: }
862: return(0);
863: }
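/* ---------------------------------------------------------------------
   Editor's usage sketch (not part of pdvec.c): VecGetValues_MPI above can
   only return locally owned entries, so indices must lie in the caller's
   ownership range.  Assumes every process owns at least one entry; error
   checks omitted.
   --------------------------------------------------------------------- */
#include "petscvec.h"

static PetscErrorCode example_get_local_values(Vec x)
{
  PetscInt    start,end,idx[2];
  PetscScalar vals[2];
  VecGetOwnershipRange(x,&start,&end);
  idx[0] = start; idx[1] = end - 1;   /* both indices are locally owned */
  VecGetValues(x,2,idx,vals);
  return 0;
}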
867: PetscErrorCode VecSetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar y[],InsertMode addv)
868: {
870: PetscMPIInt rank = xin->stash.rank;
871: PetscInt *owners = xin->map.range,start = owners[rank];
872: PetscInt end = owners[rank+1],i,row;
873: PetscScalar *xx;
876: #if defined(PETSC_USE_DEBUG)
877: if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) {
878: SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
879: } else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) {
880: SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
881: }
882: #endif
883: VecGetArray(xin,&xx);
884: xin->stash.insertmode = addv;
886: if (addv == INSERT_VALUES) {
887: for (i=0; i<ni; i++) {
888: if ((row = ix[i]) >= start && row < end) {
889: xx[row-start] = y[i];
890: } else if (!xin->stash.donotstash) {
891: if (ix[i] < 0) continue;
892: #if defined(PETSC_USE_DEBUG)
893: if (ix[i] >= xin->map.N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map.N);
894: #endif
895: VecStashValue_Private(&xin->stash,row,y[i]);
896: }
897: }
898: } else {
899: for (i=0; i<ni; i++) {
900: if ((row = ix[i]) >= start && row < end) {
901: xx[row-start] += y[i];
902: } else if (!xin->stash.donotstash) {
903: if (ix[i] < 0) continue;
904: #if defined(PETSC_USE_DEBUG)
905: if (ix[i] >= xin->map.N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map.N);
906: #endif
907: VecStashValue_Private(&xin->stash,row,y[i]);
908: }
909: }
910: }
911: VecRestoreArray(xin,&xx);
912: return(0);
913: }
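/* ---------------------------------------------------------------------
   Editor's usage sketch (not part of pdvec.c): setting an entry owned by
   another process.  VecSetValues_MPI above places the off-process value in
   the stash; it only reaches its owner during VecAssemblyBegin/End (see
   the assembly routines at the end of this file).  Index 0 is an arbitrary
   example target owned by rank 0.
   --------------------------------------------------------------------- */
#include "petscvec.h"

static PetscErrorCode example_set_offproc_value(Vec x)
{
  PetscMPIInt rank;
  PetscInt    i = 0;
  PetscScalar v = 2.0;
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  if (rank == 1) { VecSetValues(x,1,&i,&v,INSERT_VALUES); }  /* stashed on rank 1 */
  VecAssemblyBegin(x);   /* collective: every rank calls these, stash or not */
  VecAssemblyEnd(x);
  return 0;
}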
917: PetscErrorCode VecSetValuesBlocked_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar yin[],InsertMode addv)
918: {
919: PetscMPIInt rank = xin->stash.rank;
920: PetscInt *owners = xin->map.range,start = owners[rank];
922: PetscInt end = owners[rank+1],i,row,bs = xin->map.bs,j;
923: PetscScalar *xx,*y = (PetscScalar*)yin;
926: VecGetArray(xin,&xx);
927: #if defined(PETSC_USE_DEBUG)
928: if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) {
929: SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
930: }
931: else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) {
932: SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
933: }
934: #endif
935: xin->stash.insertmode = addv;
937: if (addv == INSERT_VALUES) {
938: for (i=0; i<ni; i++) {
939: if ((row = bs*ix[i]) >= start && row < end) {
940: for (j=0; j<bs; j++) {
941: xx[row-start+j] = y[j];
942: }
943: } else if (!xin->stash.donotstash) {
944: if (ix[i] < 0) continue;
945: #if defined(PETSC_USE_DEBUG)
946: if (ix[i] >= xin->map.N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map.N);
947: #endif
948: VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
949: }
950: y += bs;
951: }
952: } else {
953: for (i=0; i<ni; i++) {
954: if ((row = bs*ix[i]) >= start && row < end) {
955: for (j=0; j<bs; j++) {
956: xx[row-start+j] += y[j];
957: }
958: } else if (!xin->stash.donotstash) {
959: if (ix[i] < 0) continue;
960: #if defined(PETSC_USE_DEBUG)
961: if (ix[i] >= xin->map.N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map.N);
962: #endif
963: VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
964: }
965: y += bs;
966: }
967: }
968: VecRestoreArray(xin,&xx);
969: return(0);
970: }
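/* ---------------------------------------------------------------------
   Editor's usage sketch (not part of pdvec.c): blocked insertion through
   VecSetValuesBlocked_MPI above.  The index is a *block* index, so with a
   block size of 3 block 0 addresses entries 0..2 of the vector; local and
   global sizes must be divisible by the block size.  Sizes here are
   placeholders and error checks are omitted.
   --------------------------------------------------------------------- */
#include "petscvec.h"

static PetscErrorCode example_set_blocked(MPI_Comm comm)
{
  Vec         x;
  PetscInt    blk = 0;                     /* block owned by rank 0 */
  PetscScalar vals[3] = {1.0,2.0,3.0};
  VecCreateMPI(comm,3,PETSC_DECIDE,&x);    /* 3 entries per process */
  VecSetBlockSize(x,3);
  VecSetValuesBlocked(x,1,&blk,vals,INSERT_VALUES);
  VecAssemblyBegin(x);
  VecAssemblyEnd(x);
  VecDestroy(x);
  return 0;
}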
972: /*
973: Since nsends or nreceives may be zero we add 1 in certain mallocs
974: to make sure we never request a zero-length malloc.
975: */
978: PetscErrorCode VecAssemblyBegin_MPI(Vec xin)
979: {
981: PetscInt *owners = xin->map.range,*bowners,i,bs,nstash,reallocs;
982: PetscMPIInt size;
983: InsertMode addv;
984: MPI_Comm comm = xin->comm;
987: if (xin->stash.donotstash) {
988: return(0);
989: }
991: MPI_Allreduce(&xin->stash.insertmode,&addv,1,MPI_INT,MPI_BOR,comm);
992: if (addv == (ADD_VALUES|INSERT_VALUES)) {
993: SETERRQ(PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
994: }
995: xin->stash.insertmode = addv; /* in case this processor had no cache */
996:
997: bs = xin->map.bs;
998: MPI_Comm_size(xin->comm,&size);
999: if (!xin->bstash.bowners && xin->map.bs != -1) {
1000: PetscMalloc((size+1)*sizeof(PetscInt),&bowners);
1001: for (i=0; i<size+1; i++){ bowners[i] = owners[i]/bs;}
1002: xin->bstash.bowners = bowners;
1003: } else {
1004: bowners = xin->bstash.bowners;
1005: }
1006: VecStashScatterBegin_Private(&xin->stash,owners);
1007: VecStashScatterBegin_Private(&xin->bstash,bowners);
1008: VecStashGetInfo_Private(&xin->stash,&nstash,&reallocs);
1009: PetscInfo2(0,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
1010: VecStashGetInfo_Private(&xin->bstash,&nstash,&reallocs);
1011: PetscInfo2(0,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
1013: return(0);
1014: }
1018: PetscErrorCode VecAssemblyEnd_MPI(Vec vec)
1019: {
1021: PetscInt base,i,j,*row,flg,bs;
1022: PetscMPIInt n;
1023: PetscScalar *val,*vv,*array,*xarray;
1026: if (!vec->stash.donotstash) {
1027: VecGetArray(vec,&xarray);
1028: base = vec->map.range[vec->stash.rank];
1029: bs = vec->map.bs;
1031: /* Process the stash */
1032: while (1) {
1033: VecStashScatterGetMesg_Private(&vec->stash,&n,&row,&val,&flg);
1034: if (!flg) break;
1035: if (vec->stash.insertmode == ADD_VALUES) {
1036: for (i=0; i<n; i++) { xarray[row[i] - base] += val[i]; }
1037: } else if (vec->stash.insertmode == INSERT_VALUES) {
1038: for (i=0; i<n; i++) { xarray[row[i] - base] = val[i]; }
1039: } else {
1040: SETERRQ(PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1041: }
1042: }
1043: VecStashScatterEnd_Private(&vec->stash);
1045: /* now process the block-stash */
1046: while (1) {
1047: VecStashScatterGetMesg_Private(&vec->bstash,&n,&row,&val,&flg);
1048: if (!flg) break;
1049: for (i=0; i<n; i++) {
1050: array = xarray+row[i]*bs-base;
1051: vv = val+i*bs;
1052: if (vec->stash.insertmode == ADD_VALUES) {
1053: for (j=0; j<bs; j++) { array[j] += vv[j];}
1054: } else if (vec->stash.insertmode == INSERT_VALUES) {
1055: for (j=0; j<bs; j++) { array[j] = vv[j]; }
1056: } else {
1057: SETERRQ(PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1058: }
1059: }
1060: }
1061: VecStashScatterEnd_Private(&vec->bstash);
1062: VecRestoreArray(vec,&xarray);
1063: }
1064: vec->stash.insertmode = NOT_SET_VALUES;
1065: return(0);
1066: }