Actual source code: pdvec.c
1: #define PETSCVEC_DLL
2: /*
3: Code for some of the parallel vector primitives.
4: */
5: #include "src/vec/vec/impls/mpi/pvecimpl.h"
6: #if defined(PETSC_HAVE_PNETCDF)
8: #include "pnetcdf.h"
10: #endif
14: PetscErrorCode VecDestroy_MPI(Vec v)
15: {
16: Vec_MPI *x = (Vec_MPI*)v->data;
20: #if defined(PETSC_USE_LOG)
21: PetscLogObjectState((PetscObject)v,"Length=%D",v->map.N);
22: #endif
23: PetscFree(x->array_allocated);
25: /* Destroy local representation of vector if it exists */
26: if (x->localrep) {
27: VecDestroy(x->localrep);
28: if (x->localupdate) {VecScatterDestroy(x->localupdate);}
29: }
30: /* Destroy the stashes: note the order - so that the tags are freed properly */
31: VecStashDestroy_Private(&v->bstash);
32: VecStashDestroy_Private(&v->stash);
33: PetscFree(x);
34: return(0);
35: }
39: PetscErrorCode VecView_MPI_ASCII(Vec xin,PetscViewer viewer)
40: {
41: PetscErrorCode ierr;
42: PetscInt i,work = xin->map.n,cnt,len,nLen;
43: PetscMPIInt j,n = 0,size,rank,tag = ((PetscObject)viewer)->tag;
44: MPI_Status status;
45: PetscScalar *values,*xarray;
46: const char *name;
47: PetscViewerFormat format;
50: VecGetArray(xin,&xarray);
51: /* determine maximum message to arrive */
52: MPI_Comm_rank(xin->comm,&rank);
53: MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,xin->comm);
54: MPI_Comm_size(xin->comm,&size);
56: if (!rank) {
57: PetscMalloc((len+1)*sizeof(PetscScalar),&values);
58: PetscViewerGetFormat(viewer,&format);
59: /*
60: Matlab format and ASCII format are very similar except
61: Matlab uses %18.16e format while ASCII uses %g
62: */
63: if (format == PETSC_VIEWER_ASCII_MATLAB) {
64: PetscObjectGetName((PetscObject)xin,&name);
65: PetscViewerASCIIPrintf(viewer,"%s = [\n",name);
66: for (i=0; i<xin->map.n; i++) {
67: #if defined(PETSC_USE_COMPLEX)
68: if (PetscImaginaryPart(xarray[i]) > 0.0) {
69: PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
70: } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
71: PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",PetscRealPart(xarray[i]),-PetscImaginaryPart(xarray[i]));
72: } else {
73: PetscViewerASCIIPrintf(viewer,"%18.16e\n",PetscRealPart(xarray[i]));
74: }
75: #else
76: PetscViewerASCIIPrintf(viewer,"%18.16e\n",xarray[i]);
77: #endif
78: }
79: /* receive and print messages */
80: for (j=1; j<size; j++) {
81: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
82: MPI_Get_count(&status,MPIU_SCALAR,&n);
83: for (i=0; i<n; i++) {
84: #if defined(PETSC_USE_COMPLEX)
85: if (PetscImaginaryPart(values[i]) > 0.0) {
86: PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16e i\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
87: } else if (PetscImaginaryPart(values[i]) < 0.0) {
88: PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16e i\n",PetscRealPart(values[i]),-PetscImaginaryPart(values[i]));
89: } else {
90: PetscViewerASCIIPrintf(viewer,"%18.16e\n",PetscRealPart(values[i]));
91: }
92: #else
93: PetscViewerASCIIPrintf(viewer,"%18.16e\n",values[i]);
94: #endif
95: }
96: }
97: PetscViewerASCIIPrintf(viewer,"];\n");
99: } else if (format == PETSC_VIEWER_ASCII_SYMMODU) {
100: for (i=0; i<xin->map.n; i++) {
101: #if defined(PETSC_USE_COMPLEX)
102: PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
103: #else
104: PetscViewerASCIIPrintf(viewer,"%18.16e\n",xarray[i]);
105: #endif
106: }
107: /* receive and print messages */
108: for (j=1; j<size; j++) {
109: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
110: MPI_Get_count(&status,MPIU_SCALAR,&n);
111: for (i=0; i<n; i++) {
112: #if defined(PETSC_USE_COMPLEX)
113: PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
114: #else
115: PetscViewerASCIIPrintf(viewer,"%18.16e\n",values[i]);
116: #endif
117: }
118: }
119: } else if (format == PETSC_VIEWER_ASCII_VTK || format == PETSC_VIEWER_ASCII_VTK_CELL) {
120: /*
121: state 0: No header has been output
122: state 1: Only POINT_DATA has been output
123: state 2: Only CELL_DATA has been output
124: state 3: Output both, POINT_DATA last
125: state 4: Output both, CELL_DATA last
126: */
127: static PetscInt stateId = -1;
128: int outputState = 0;
129: PetscTruth hasState;
130: int doOutput = 0;
131: PetscInt bs, b;
133: if (stateId < 0) {
134: PetscObjectComposedDataRegister(&stateId);
135: }
136: PetscObjectComposedDataGetInt((PetscObject) viewer, stateId, outputState, hasState);
137: if (!hasState) {
138: outputState = 0;
139: }
140: PetscObjectGetName((PetscObject) xin, &name);
141: VecGetLocalSize(xin, &nLen);
142: n = (PetscMPIInt) nLen;
143: VecGetBlockSize(xin, &bs);
144: if ((bs < 1) || (bs > 3)) {
145: SETERRQ1(PETSC_ERR_ARG_WRONGSTATE, "VTK can only handle 3D objects, but vector dimension is %d", bs);
146: }
147: if (format == PETSC_VIEWER_ASCII_VTK) {
148: if (outputState == 0) {
149: outputState = 1;
150: doOutput = 1;
151: } else if (outputState == 1) {
152: doOutput = 0;
153: } else if (outputState == 2) {
154: outputState = 3;
155: doOutput = 1;
156: } else if (outputState == 3) {
157: doOutput = 0;
158: } else if (outputState == 4) {
159: SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "Tried to output POINT_DATA again after intervening CELL_DATA");
160: }
161: if (doOutput) {
162: PetscViewerASCIIPrintf(viewer, "POINT_DATA %d\n", xin->map.N);
163: }
164: } else {
165: if (outputState == 0) {
166: outputState = 2;
167: doOutput = 1;
168: } else if (outputState == 1) {
169: outputState = 4;
170: doOutput = 1;
171: } else if (outputState == 2) {
172: doOutput = 0;
173: } else if (outputState == 3) {
174: SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "Tried to output CELL_DATA again after intervening POINT_DATA");
175: } else if (outputState == 4) {
176: doOutput = 0;
177: }
178: if (doOutput) {
179: PetscViewerASCIIPrintf(viewer, "CELL_DATA %d\n", xin->map.N);
180: }
181: }
182: PetscObjectComposedDataSetInt((PetscObject) viewer, stateId, outputState);
183: if (name) {
184: PetscViewerASCIIPrintf(viewer, "SCALARS %s double %d\n", name, bs);
185: } else {
186: PetscViewerASCIIPrintf(viewer, "SCALARS scalars double %d\n", bs);
187: }
188: PetscViewerASCIIPrintf(viewer, "LOOKUP_TABLE default\n");
189: for (i=0; i<n/bs; i++) {
190: for (b=0; b<bs; b++) {
191: if (b > 0) {
192: PetscViewerASCIIPrintf(viewer," ");
193: }
194: #if !defined(PETSC_USE_COMPLEX)
195: PetscViewerASCIIPrintf(viewer,"%G",xarray[i*bs+b]);
196: #endif
197: }
198: PetscViewerASCIIPrintf(viewer,"\n");
199: }
200: for (j=1; j<size; j++) {
201: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
202: MPI_Get_count(&status,MPIU_SCALAR,&n);
203: for (i=0; i<n/bs; i++) {
204: for (b=0; b<bs; b++) {
205: if (b > 0) {
206: PetscViewerASCIIPrintf(viewer," ");
207: }
208: #if !defined(PETSC_USE_COMPLEX)
209: PetscViewerASCIIPrintf(viewer,"%G",values[i*bs+b]);
210: #endif
211: }
212: PetscViewerASCIIPrintf(viewer,"\n");
213: }
214: }
215: } else if (format == PETSC_VIEWER_ASCII_VTK_COORDS) {
216: PetscInt bs, b;
218: VecGetLocalSize(xin, &nLen);
219: n = (PetscMPIInt) nLen;
220: VecGetBlockSize(xin, &bs);
221: if ((bs < 1) || (bs > 3)) {
222: SETERRQ1(PETSC_ERR_ARG_WRONGSTATE, "VTK can only handle 3D objects, but vector dimension is %d", bs);
223: }
224: for (i=0; i<n/bs; i++) {
225: for (b=0; b<bs; b++) {
226: if (b > 0) {
227: PetscViewerASCIIPrintf(viewer," ");
228: }
229: #if !defined(PETSC_USE_COMPLEX)
230: PetscViewerASCIIPrintf(viewer,"%G",xarray[i*bs+b]);
231: #endif
232: }
233: for (b=bs; b<3; b++) {
234: PetscViewerASCIIPrintf(viewer," 0.0");
235: }
236: PetscViewerASCIIPrintf(viewer,"\n");
237: }
238: for (j=1; j<size; j++) {
239: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
240: MPI_Get_count(&status,MPIU_SCALAR,&n);
241: for (i=0; i<n/bs; i++) {
242: for (b=0; b<bs; b++) {
243: if (b > 0) {
244: PetscViewerASCIIPrintf(viewer," ");
245: }
246: #if !defined(PETSC_USE_COMPLEX)
247: PetscViewerASCIIPrintf(viewer,"%G",values[i*bs+b]);
248: #endif
249: }
250: for (b=bs; b<3; b++) {
251: PetscViewerASCIIPrintf(viewer," 0.0");
252: }
253: PetscViewerASCIIPrintf(viewer,"\n");
254: }
255: }
256: } else if (format == PETSC_VIEWER_ASCII_PCICE) {
257: PetscInt bs, b, vertexCount = 1;
259: VecGetLocalSize(xin, &nLen);
260: n = (PetscMPIInt) nLen;
261: VecGetBlockSize(xin, &bs);
262: if ((bs < 1) || (bs > 3)) {
263: SETERRQ1(PETSC_ERR_ARG_WRONGSTATE, "PCICE can only handle up to 3D objects, but vector dimension is %d", bs);
264: }
265: PetscViewerASCIIPrintf(viewer,"%D\n", xin->map.N/bs);
266: for (i=0; i<n/bs; i++) {
267: PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
268: for (b=0; b<bs; b++) {
269: if (b > 0) {
270: PetscViewerASCIIPrintf(viewer," ");
271: }
272: #if !defined(PETSC_USE_COMPLEX)
273: PetscViewerASCIIPrintf(viewer,"% 12.5E",xarray[i*bs+b]);
274: #endif
275: }
276: PetscViewerASCIIPrintf(viewer,"\n");
277: }
278: for (j=1; j<size; j++) {
279: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
280: MPI_Get_count(&status,MPIU_SCALAR,&n);
281: for (i=0; i<n/bs; i++) {
282: PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
283: for (b=0; b<bs; b++) {
284: if (b > 0) {
285: PetscViewerASCIIPrintf(viewer," ");
286: }
287: #if !defined(PETSC_USE_COMPLEX)
288: PetscViewerASCIIPrintf(viewer,"% 12.5E",values[i*bs+b]);
289: #endif
290: }
291: PetscViewerASCIIPrintf(viewer,"\n");
292: }
293: }
294: } else if (format == PETSC_VIEWER_ASCII_PYLITH) {
295: PetscInt bs, b, vertexCount = 1;
297: VecGetLocalSize(xin, &nLen);
298: n = (PetscMPIInt) nLen;
299: VecGetBlockSize(xin, &bs);
300: if (bs != 3) {
301: SETERRQ1(PETSC_ERR_ARG_WRONGSTATE, "PyLith can only handle 3D objects, but vector dimension is %d", bs);
302: }
303: PetscViewerASCIIPrintf(viewer,"#\n");
304: PetscViewerASCIIPrintf(viewer,"# Node X-coord Y-coord Z-coord\n");
305: PetscViewerASCIIPrintf(viewer,"#\n");
306: for (i=0; i<n/bs; i++) {
307: PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
308: for (b=0; b<bs; b++) {
309: if (b > 0) {
310: PetscViewerASCIIPrintf(viewer," ");
311: }
312: #if !defined(PETSC_USE_COMPLEX)
313: PetscViewerASCIIPrintf(viewer,"% 16.8E",xarray[i*bs+b]);
314: #endif
315: }
316: PetscViewerASCIIPrintf(viewer,"\n");
317: }
318: for (j=1; j<size; j++) {
319: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
320: MPI_Get_count(&status,MPIU_SCALAR,&n);
321: for (i=0; i<n/bs; i++) {
322: PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
323: for (b=0; b<bs; b++) {
324: if (b > 0) {
325: PetscViewerASCIIPrintf(viewer," ");
326: }
327: #if !defined(PETSC_USE_COMPLEX)
328: PetscViewerASCIIPrintf(viewer,"% 16.8E",values[i*bs+b]);
329: #endif
330: }
331: PetscViewerASCIIPrintf(viewer,"\n");
332: }
333: }
334: } else {
335: if (format != PETSC_VIEWER_ASCII_COMMON) {PetscViewerASCIIPrintf(viewer,"Process [%d]\n",rank);}
336: cnt = 0;
337: for (i=0; i<xin->map.n; i++) {
338: if (format == PETSC_VIEWER_ASCII_INDEX) {
339: PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
340: }
341: #if defined(PETSC_USE_COMPLEX)
342: if (PetscImaginaryPart(xarray[i]) > 0.0) {
343: PetscViewerASCIIPrintf(viewer,"%G + %G i\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
344: } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
345: PetscViewerASCIIPrintf(viewer,"%G - %G i\n",PetscRealPart(xarray[i]),-PetscImaginaryPart(xarray[i]));
346: } else {
347: PetscViewerASCIIPrintf(viewer,"%G\n",PetscRealPart(xarray[i]));
348: }
349: #else
350: PetscViewerASCIIPrintf(viewer,"%G\n",xarray[i]);
351: #endif
352: }
353: /* receive and print messages */
354: for (j=1; j<size; j++) {
355: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
356: MPI_Get_count(&status,MPIU_SCALAR,&n);
357: if (format != PETSC_VIEWER_ASCII_COMMON) {
358: PetscViewerASCIIPrintf(viewer,"Process [%d]\n",j);
359: }
360: for (i=0; i<n; i++) {
361: if (format == PETSC_VIEWER_ASCII_INDEX) {
362: PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
363: }
364: #if defined(PETSC_USE_COMPLEX)
365: if (PetscImaginaryPart(values[i]) > 0.0) {
366: PetscViewerASCIIPrintf(viewer,"%G + %G i\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
367: } else if (PetscImaginaryPart(values[i]) < 0.0) {
368: PetscViewerASCIIPrintf(viewer,"%G - %G i\n",PetscRealPart(values[i]),-PetscImaginaryPart(values[i]));
369: } else {
370: PetscViewerASCIIPrintf(viewer,"%G\n",PetscRealPart(values[i]));
371: }
372: #else
373: PetscViewerASCIIPrintf(viewer,"%G\n",values[i]);
374: #endif
375: }
376: }
377: }
378: PetscFree(values);
379: } else {
380: /* send values */
381: MPI_Send(xarray,xin->map.n,MPIU_SCALAR,0,tag,xin->comm);
382: }
383: PetscViewerFlush(viewer);
384: VecRestoreArray(xin,&xarray);
385: return(0);
386: }
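/*
   Usage sketch (editorial note, not part of pdvec.c): VecView_MPI_ASCII is normally
   reached through VecView() on an ASCII viewer, and the format set on the viewer
   selects one of the branches above. A minimal sketch, assuming x is an assembled
   parallel vector on PETSC_COMM_WORLD:

       PetscViewer viewer;
       PetscViewerASCIIOpen(PETSC_COMM_WORLD,"x.m",&viewer);
       PetscViewerSetFormat(viewer,PETSC_VIEWER_ASCII_MATLAB);
       VecView(x,viewer);            rank 0 gathers the remote pieces and prints them
       PetscViewerDestroy(viewer);
*/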
390: PetscErrorCode VecView_MPI_Binary(Vec xin,PetscViewer viewer)
391: {
393: PetscMPIInt rank,size,tag = ((PetscObject)viewer)->tag,n;
394: PetscInt len,work = xin->map.n,j;
395: int fdes;
396: MPI_Status status;
397: PetscScalar *values,*xarray;
398: FILE *file;
401: VecGetArray(xin,&xarray);
402: PetscViewerBinaryGetDescriptor(viewer,&fdes);
404: /* determine maximum message to arrive */
405: MPI_Comm_rank(xin->comm,&rank);
406: MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,xin->comm);
407: MPI_Comm_size(xin->comm,&size);
409: if (!rank) {
410: PetscInt cookie = VEC_FILE_COOKIE;
411: PetscBinaryWrite(fdes,&cookie,1,PETSC_INT,PETSC_FALSE);
412: PetscBinaryWrite(fdes,&xin->map.N,1,PETSC_INT,PETSC_FALSE);
413: PetscBinaryWrite(fdes,xarray,xin->map.n,PETSC_SCALAR,PETSC_FALSE);
415: PetscMalloc((len+1)*sizeof(PetscScalar),&values);
416: /* receive and print messages */
417: for (j=1; j<size; j++) {
418: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
419: MPI_Get_count(&status,MPIU_SCALAR,&n);
420: PetscBinaryWrite(fdes,values,n,PETSC_SCALAR,PETSC_FALSE);
421: }
422: PetscFree(values);
423: PetscViewerBinaryGetInfoPointer(viewer,&file);
424: if (file && xin->map.bs > 1) {
425: if (xin->prefix) {
426: PetscFPrintf(PETSC_COMM_SELF,file,"-%svecload_block_size %D\n",xin->prefix,xin->map.bs);
427: } else {
428: PetscFPrintf(PETSC_COMM_SELF,file,"-vecload_block_size %D\n",xin->map.bs);
429: }
430: }
431: } else {
432: /* send values */
433: MPI_Send(xarray,xin->map.n,MPIU_SCALAR,0,tag,xin->comm);
434: }
435: VecRestoreArray(xin,&xarray);
436: return(0);
437: }
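/*
   Usage sketch (editorial note, not part of pdvec.c): the binary path is driven the
   same way with a binary viewer. Assuming a vector x on PETSC_COMM_WORLD:

       PetscViewer viewer;
       PetscViewerBinaryOpen(PETSC_COMM_WORLD,"x.dat",FILE_MODE_WRITE,&viewer);
       VecView(x,viewer);            writes VEC_FILE_COOKIE, the global size, then the values
       PetscViewerDestroy(viewer);

   When the vector has a block size greater than one, the routine above also records a
   -vecload_block_size option in the viewer's info file so a later VecLoad can recover it.
*/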
441: PetscErrorCode VecView_MPI_Draw_LG(Vec xin,PetscViewer viewer)
442: {
443: PetscDraw draw;
444: PetscTruth isnull;
447: #if defined(PETSC_USE_64BIT_INDICES)
449: PetscViewerDrawGetDraw(viewer,0,&draw);
450: PetscDrawIsNull(draw,&isnull);
451: if (isnull) return(0);
452: SETERRQ(PETSC_ERR_SUP,"Not supported with 64 bit integers");
453: #else
454: PetscMPIInt size,rank;
455: int i,N = xin->map.N,*lens;
456: PetscReal *xx,*yy;
457: PetscDrawLG lg;
458: PetscScalar *xarray;
461: PetscViewerDrawGetDraw(viewer,0,&draw);
462: PetscDrawIsNull(draw,&isnull);
463: if (isnull) return(0);
465: VecGetArray(xin,&xarray);
466: PetscViewerDrawGetDrawLG(viewer,0,&lg);
467: PetscDrawCheckResizedWindow(draw);
468: MPI_Comm_rank(xin->comm,&rank);
469: MPI_Comm_size(xin->comm,&size);
470: if (!rank) {
471: PetscDrawLGReset(lg);
472: PetscMalloc(2*(N+1)*sizeof(PetscReal),&xx);
473: for (i=0; i<N; i++) {xx[i] = (PetscReal) i;}
474: yy = xx + N;
475: PetscMalloc(size*sizeof(PetscInt),&lens);
476: for (i=0; i<size; i++) {
477: lens[i] = xin->map.range[i+1] - xin->map.range[i];
478: }
479: #if !defined(PETSC_USE_COMPLEX)
480: MPI_Gatherv(xarray,xin->map.n,MPIU_REAL,yy,lens,xin->map.range,MPIU_REAL,0,xin->comm);
481: #else
482: {
483: PetscReal *xr;
484: PetscMalloc((xin->map.n+1)*sizeof(PetscReal),&xr);
485: for (i=0; i<xin->map.n; i++) {
486: xr[i] = PetscRealPart(xarray[i]);
487: }
488: MPI_Gatherv(xr,xin->map.n,MPIU_REAL,yy,lens,xin->map.range,MPIU_REAL,0,xin->comm);
489: PetscFree(xr);
490: }
491: #endif
492: PetscFree(lens);
493: PetscDrawLGAddPoints(lg,N,&xx,&yy);
494: PetscFree(xx);
495: } else {
496: #if !defined(PETSC_USE_COMPLEX)
497: MPI_Gatherv(xarray,xin->map.n,MPIU_REAL,0,0,0,MPIU_REAL,0,xin->comm);
498: #else
499: {
500: PetscReal *xr;
501: PetscMalloc((xin->map.n+1)*sizeof(PetscReal),&xr);
502: for (i=0; i<xin->map.n; i++) {
503: xr[i] = PetscRealPart(xarray[i]);
504: }
505: MPI_Gatherv(xr,xin->map.n,MPIU_REAL,0,0,0,MPIU_REAL,0,xin->comm);
506: PetscFree(xr);
507: }
508: #endif
509: }
510: PetscDrawLGDraw(lg);
511: PetscDrawSynchronizedFlush(draw);
512: VecRestoreArray(xin,&xarray);
513: #endif
514: return(0);
515: }
518: /* I am assuming this is extern "C" because it is dynamically loaded. If not, we can remove the DLLEXPORT tag. */
521: PetscErrorCode VecView_MPI_Draw(Vec xin,PetscViewer viewer)
522: {
524: PetscMPIInt rank,size,tag = ((PetscObject)viewer)->tag;
525: PetscInt i,start,end;
526: MPI_Status status;
527: PetscReal coors[4],ymin,ymax,xmin,xmax,tmp;
528: PetscDraw draw;
529: PetscTruth isnull;
530: PetscDrawAxis axis;
531: PetscScalar *xarray;
534: PetscViewerDrawGetDraw(viewer,0,&draw);
535: PetscDrawIsNull(draw,&isnull); if (isnull) return(0);
537: VecGetArray(xin,&xarray);
538: PetscDrawCheckResizedWindow(draw);
539: xmin = 1.e20; xmax = -1.e20;
540: for (i=0; i<xin->map.n; i++) {
541: #if defined(PETSC_USE_COMPLEX)
542: if (PetscRealPart(xarray[i]) < xmin) xmin = PetscRealPart(xarray[i]);
543: if (PetscRealPart(xarray[i]) > xmax) xmax = PetscRealPart(xarray[i]);
544: #else
545: if (xarray[i] < xmin) xmin = xarray[i];
546: if (xarray[i] > xmax) xmax = xarray[i];
547: #endif
548: }
549: if (xmin + 1.e-10 > xmax) {
550: xmin -= 1.e-5;
551: xmax += 1.e-5;
552: }
553: MPI_Reduce(&xmin,&ymin,1,MPIU_REAL,MPI_MIN,0,xin->comm);
554: MPI_Reduce(&xmax,&ymax,1,MPIU_REAL,MPI_MAX,0,xin->comm);
555: MPI_Comm_size(xin->comm,&size);
556: MPI_Comm_rank(xin->comm,&rank);
557: PetscDrawAxisCreate(draw,&axis);
558: PetscLogObjectParent(draw,axis);
559: if (!rank) {
560: PetscDrawClear(draw);
561: PetscDrawFlush(draw);
562: PetscDrawAxisSetLimits(axis,0.0,(double)xin->map.N,ymin,ymax);
563: PetscDrawAxisDraw(axis);
564: PetscDrawGetCoordinates(draw,coors,coors+1,coors+2,coors+3);
565: }
566: PetscDrawAxisDestroy(axis);
567: MPI_Bcast(coors,4,MPIU_REAL,0,xin->comm);
568: if (rank) {PetscDrawSetCoordinates(draw,coors[0],coors[1],coors[2],coors[3]);}
569: /* draw local part of vector */
570: VecGetOwnershipRange(xin,&start,&end);
571: if (rank < size-1) { /*send value to right */
572: MPI_Send(&xarray[xin->map.n-1],1,MPIU_REAL,rank+1,tag,xin->comm);
573: }
574: for (i=1; i<xin->map.n; i++) {
575: #if !defined(PETSC_USE_COMPLEX)
576: PetscDrawLine(draw,(PetscReal)(i-1+start),xarray[i-1],(PetscReal)(i+start),
577: xarray[i],PETSC_DRAW_RED);
578: #else
579: PetscDrawLine(draw,(PetscReal)(i-1+start),PetscRealPart(xarray[i-1]),(PetscReal)(i+start),
580: PetscRealPart(xarray[i]),PETSC_DRAW_RED);
581: #endif
582: }
583: if (rank) { /* receive value from the left */
584: MPI_Recv(&tmp,1,MPIU_REAL,rank-1,tag,xin->comm,&status);
585: #if !defined(PETSC_USE_COMPLEX)
586: PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,xarray[0],PETSC_DRAW_RED);
587: #else
588: PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,PetscRealPart(xarray[0]),PETSC_DRAW_RED);
589: #endif
590: }
591: PetscDrawSynchronizedFlush(draw);
592: PetscDrawPause(draw);
593: VecRestoreArray(xin,&xarray);
594: return(0);
595: }
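/*
   Usage sketch (editorial note, not part of pdvec.c): both draw routines are typically
   reached through a draw viewer; VecView_MPI chooses VecView_MPI_Draw_LG when the
   viewer format is PETSC_VIEWER_DRAW_LG and VecView_MPI_Draw otherwise. For example,
   assuming an X display is available:

       VecView(x,PETSC_VIEWER_DRAW_WORLD);
       PetscViewerSetFormat(PETSC_VIEWER_DRAW_WORLD,PETSC_VIEWER_DRAW_LG);
       VecView(x,PETSC_VIEWER_DRAW_WORLD);
*/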
598: #if defined(PETSC_HAVE_MATLAB_ENGINE)
601: PetscErrorCode VecView_MPI_Matlab(Vec xin,PetscViewer viewer)
602: {
604: PetscMPIInt rank,size,*lens;
605: PetscInt i,N = xin->map.N;
606: PetscScalar *xx,*xarray;
609: VecGetArray(xin,&xarray);
610: MPI_Comm_rank(xin->comm,&rank);
611: MPI_Comm_size(xin->comm,&size);
612: if (!rank) {
613: PetscMalloc((N+1)*sizeof(PetscScalar),&xx);
614: PetscMalloc(size*sizeof(PetscMPIInt),&lens);
615: for (i=0; i<size; i++) {
616: lens[i] = xin->map.range[i+1] - xin->map.range[i];
617: }
618: MPI_Gatherv(xarray,xin->map.n,MPIU_SCALAR,xx,lens,xin->map.range,MPIU_SCALAR,0,xin->comm);
619: PetscFree(lens);
621: PetscObjectName((PetscObject)xin);
622: PetscViewerMatlabPutArray(viewer,N,1,xx,xin->name);
624: PetscFree(xx);
625: } else {
626: MPI_Gatherv(xarray,xin->map.n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,xin->comm);
627: }
628: VecRestoreArray(xin,&xarray);
629: return(0);
630: }
631: #endif
633: #if defined(PETSC_HAVE_PNETCDF)
636: PetscErrorCode VecView_MPI_Netcdf(Vec xin,PetscViewer v)
637: {
639: int n = xin->map.n,ncid,xdim,xdim_num=1,xin_id,xstart;
640: PetscScalar *xarray;
643: VecGetArray(xin,&xarray);
644: PetscViewerNetcdfGetID(v,&ncid);
645: if (ncid < 0) SETERRQ(PETSC_ERR_ORDER,"First call PetscViewerNetcdfOpen to create NetCDF dataset");
646: /* define dimensions */
647: ncmpi_def_dim(ncid,"PETSc_Vector_Global_Size",xin->map.N,&xdim);
648: /* define variables */
649: ncmpi_def_var(ncid,"PETSc_Vector_MPI",NC_DOUBLE,xdim_num,&xdim,&xin_id);
650: /* leave define mode */
651: ncmpi_enddef(ncid);
652: /* store the vector */
653: VecGetOwnershipRange(xin,&xstart,PETSC_NULL);
654: ncmpi_put_vara_double_all(ncid,xin_id,(const MPI_Offset*)&xstart,(const MPI_Offset*)&n,xarray);
655: VecRestoreArray(xin,&xarray);
656: return(0);
657: }
658: #endif
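/*
   Usage sketch (editorial note, not part of pdvec.c): as the error message above states,
   a NetCDF viewer has to be created before calling VecView. Assuming a parallel-netcdf
   build, and assuming PetscViewerNetcdfOpen takes (comm, filename, file mode, &viewer),
   the call sequence would look like:

       PetscViewer viewer;
       PetscViewerNetcdfOpen(PETSC_COMM_WORLD,"x.nc",FILE_MODE_WRITE,&viewer);
       VecView(x,viewer);
       PetscViewerDestroy(viewer);
*/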
660: #if defined(PETSC_HAVE_HDF4)
663: PetscErrorCode VecView_MPI_HDF4_Ex(Vec X, PetscViewer viewer, int d, int *dims)
664: {
666: PetscMPIInt rank,size,tag = ((PetscObject)viewer)->tag;
667: int len, i, j, k, cur, bs, n, N;
668: MPI_Status status;
669: PetscScalar *x;
670: float *xlf, *xf;
674: bs = X->map.bs > 0 ? X->map.bs : 1;
675: N = X->map.N / bs;
676: n = X->map.n / bs;
678: /* For now, always convert to float */
679: PetscMalloc(N * sizeof(float), &xf);
680: PetscMalloc(n * sizeof(float), &xlf);
682: MPI_Comm_rank(X->comm, &rank);
683: MPI_Comm_size(X->comm, &size);
685: VecGetArray(X, &x);
687: for (k = 0; k < bs; k++) {
688: for (i = 0; i < n; i++) {
689: xlf[i] = (float) x[i*bs + k];
690: }
691: if (!rank) {
692: cur = 0;
693: PetscMemcpy(xf + cur, xlf, n * sizeof(float));
694: cur += n;
695: for (j = 1; j < size; j++) {
696: MPI_Recv(xf + cur, N - cur, MPI_FLOAT, j, tag, X->comm,&status);
697: MPI_Get_count(&status, MPI_FLOAT, &len);cur += len;
698: }
699: if (cur != N) {
700: SETERRQ2(PETSC_ERR_PLIB, "Gathered %d values but expected %d", cur, N);
701: }
702: PetscViewerHDF4WriteSDS(viewer, xf, 2, dims, bs);
703: } else {
704: MPI_Send(xlf, n, MPI_FLOAT, 0, tag, X->comm);
705: }
706: }
707: VecRestoreArray(X, &x);
708: PetscFree(xlf);
709: PetscFree(xf);
710: return(0);
711: }
712: #endif
714: #if defined(PETSC_HAVE_HDF4)
717: PetscErrorCode VecView_MPI_HDF4(Vec xin,PetscViewer viewer)
718: {
720: int bs, dims[1];
722: bs = xin->map.bs > 0 ? xin->map.bs : 1;
723: dims[0] = xin->map.N / bs;
724: VecView_MPI_HDF4_Ex(xin, viewer, 1, dims);
725: return(0);
726: }
727: #endif
731: PetscErrorCode VecView_MPI(Vec xin,PetscViewer viewer)
732: {
734: PetscTruth iascii,isbinary,isdraw;
735: #if defined(PETSC_HAVE_MATHEMATICA)
736: PetscTruth ismathematica;
737: #endif
738: #if defined(PETSC_HAVE_PNETCDF)
739: PetscTruth isnetcdf;
740: #endif
741: #if defined(PETSC_HAVE_HDF4)
742: PetscTruth ishdf4;
743: #endif
744: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE) && !defined(PETSC_USE_MAT_SINGLE)
745: PetscTruth ismatlab;
746: #endif
749: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
750: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
751: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);
752: #if defined(PETSC_HAVE_MATHEMATICA)
753: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_MATHEMATICA,&ismathematica);
754: #endif
755: #if defined(PETSC_HAVE_PNETCDF)
756: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_NETCDF,&isnetcdf);
757: #endif
758: #if defined(PETSC_HAVE_HDF4)
759: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_HDF4,&ishdf4);
760: #endif
761: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE) && !defined(PETSC_USE_MAT_SINGLE)
762: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_MATLAB,&ismatlab);
763: #endif
764: if (iascii){
765: VecView_MPI_ASCII(xin,viewer);
766: } else if (isbinary) {
767: VecView_MPI_Binary(xin,viewer);
768: } else if (isdraw) {
769: PetscViewerFormat format;
771: PetscViewerGetFormat(viewer,&format);
772: if (format == PETSC_VIEWER_DRAW_LG) {
773: VecView_MPI_Draw_LG(xin,viewer);
774: } else {
775: VecView_MPI_Draw(xin,viewer);
776: }
777: #if defined(PETSC_HAVE_MATHEMATICA)
778: } else if (ismathematica) {
779: PetscViewerMathematicaPutVector(viewer,xin);
780: #endif
781: #if defined(PETSC_HAVE_PNETCDF)
782: } else if (isnetcdf) {
783: VecView_MPI_Netcdf(xin,viewer);
784: #endif
785: #if defined(PETSC_HAVE_HDF4)
786: } else if (ishdf4) {
787: VecView_MPI_HDF4(xin,viewer);
788: #endif
789: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE) && !defined(PETSC_USE_MAT_SINGLE)
790: } else if (ismatlab) {
791: VecView_MPI_Matlab(xin,viewer);
792: #endif
793: } else {
794: SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for this object",((PetscObject)viewer)->type_name);
795: }
796: return(0);
797: }
801: PetscErrorCode VecGetSize_MPI(Vec xin,PetscInt *N)
802: {
804: *N = xin->map.N;
805: return(0);
806: }
810: PetscErrorCode VecGetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],PetscScalar y[])
811: {
812: Vec_MPI *x = (Vec_MPI *)xin->data;
813: PetscScalar *xx = x->array;
814: PetscInt i,tmp,start = xin->map.range[xin->stash.rank];
817: for (i=0; i<ni; i++) {
818: if (xin->stash.ignorenegidx == PETSC_TRUE && ix[i] < 0) continue;
819: tmp = ix[i] - start;
820: #if defined(PETSC_USE_DEBUG)
821: if (tmp < 0 || tmp >= xin->map.n) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Can only get local values, trying %D",ix[i]);
822: #endif
823: y[i] = xx[tmp];
824: }
825: return(0);
826: }
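/*
   Usage note (editorial, not part of pdvec.c): as the range check above shows,
   VecGetValues on an MPI vector can only return entries owned by the calling process.
   A minimal sketch, assuming x is assembled:

       PetscInt    start,end,idx[2];
       PetscScalar vals[2];
       VecGetOwnershipRange(x,&start,&end);
       idx[0] = start; idx[1] = start+1;        indices must lie in [start,end)
       VecGetValues(x,2,idx,vals);
*/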
830: PetscErrorCode VecSetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar y[],InsertMode addv)
831: {
833: PetscMPIInt rank = xin->stash.rank;
834: PetscInt *owners = xin->map.range,start = owners[rank];
835: PetscInt end = owners[rank+1],i,row;
836: PetscScalar *xx;
839: #if defined(PETSC_USE_DEBUG)
840: if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) {
841: SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
842: } else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) {
843: SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
844: }
845: #endif
846: VecGetArray(xin,&xx);
847: xin->stash.insertmode = addv;
849: if (addv == INSERT_VALUES) {
850: for (i=0; i<ni; i++) {
851: if (xin->stash.ignorenegidx == PETSC_TRUE && ix[i] < 0) continue;
852: #if defined(PETSC_USE_DEBUG)
853: else if (xin->stash.ignorenegidx == PETSC_FALSE && ix[i] < 0) {
854: SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
855: }
856: #endif
857: if ((row = ix[i]) >= start && row < end) {
858: xx[row-start] = y[i];
859: } else if (!xin->stash.donotstash) {
860: #if defined(PETSC_USE_DEBUG)
861: if (ix[i] >= xin->map.N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map.N);
862: #endif
863: VecStashValue_Private(&xin->stash,row,y[i]);
864: }
865: }
866: } else {
867: for (i=0; i<ni; i++) {
868: if (xin->stash.ignorenegidx == PETSC_TRUE && ix[i] < 0) continue;
869: #if defined(PETSC_USE_DEBUG)
870: else if (xin->stash.ignorenegidx == PETSC_FALSE && ix[i] < 0) {
871: SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
872: }
873: #endif
874: if ((row = ix[i]) >= start && row < end) {
875: xx[row-start] += y[i];
876: } else if (!xin->stash.donotstash) {
877: #if defined(PETSC_USE_DEBUG)
878: if (ix[i] >= xin->map.N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map.N);
879: #endif
880: VecStashValue_Private(&xin->stash,row,y[i]);
881: }
882: }
883: }
884: VecRestoreArray(xin,&xx);
885: return(0);
886: }
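/*
   Usage sketch (editorial note, not part of pdvec.c): entries destined for rows owned by
   another process are stashed here and only communicated during assembly, so every
   VecSetValues phase must be closed with VecAssemblyBegin/VecAssemblyEnd:

       VecSetValues(x,1,&row,&val,ADD_VALUES);        row may be off-process
       VecAssemblyBegin(x);                           sends the stashed entries
       VecAssemblyEnd(x);                             receives and applies them

   Mixing INSERT_VALUES and ADD_VALUES without an intervening assembly is an error,
   as the debug checks above enforce.
*/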
890: PetscErrorCode VecSetValuesBlocked_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar yin[],InsertMode addv)
891: {
892: PetscMPIInt rank = xin->stash.rank;
893: PetscInt *owners = xin->map.range,start = owners[rank];
895: PetscInt end = owners[rank+1],i,row,bs = xin->map.bs,j;
896: PetscScalar *xx,*y = (PetscScalar*)yin;
899: VecGetArray(xin,&xx);
900: #if defined(PETSC_USE_DEBUG)
901: if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) {
902: SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
903: }
904: else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) {
905: SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
906: }
907: #endif
908: xin->stash.insertmode = addv;
910: if (addv == INSERT_VALUES) {
911: for (i=0; i<ni; i++) {
912: if ((row = bs*ix[i]) >= start && row < end) {
913: for (j=0; j<bs; j++) {
914: xx[row-start+j] = y[j];
915: }
916: } else if (!xin->stash.donotstash) {
917: if (ix[i] < 0) continue;
918: #if defined(PETSC_USE_DEBUG)
919: if (ix[i] >= xin->map.N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map.N);
920: #endif
921: VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
922: }
923: y += bs;
924: }
925: } else {
926: for (i=0; i<ni; i++) {
927: if ((row = bs*ix[i]) >= start && row < end) {
928: for (j=0; j<bs; j++) {
929: xx[row-start+j] += y[j];
930: }
931: } else if (!xin->stash.donotstash) {
932: if (ix[i] < 0) continue;
933: #if defined(PETSC_USE_DEBUG)
934: if (ix[i] >= xin->map.N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map.N);
935: #endif
936: VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
937: }
938: y += bs;
939: }
940: }
941: VecRestoreArray(xin,&xx);
942: return(0);
943: }
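/*
   Usage sketch (editorial note, not part of pdvec.c): the blocked variant addresses whole
   blocks by block index. Assuming x has block size 3 (set with VecSetBlockSize before the
   vector is populated), the block with index 4 covers entries 12,13,14:

       PetscInt    bi = 4;
       PetscScalar vb[3] = {1.0,2.0,3.0};
       VecSetValuesBlocked(x,1,&bi,vb,INSERT_VALUES);
       VecAssemblyBegin(x); VecAssemblyEnd(x);
*/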
945: /*
946: Since nsends or nreceives may be zero we add 1 in certain mallocs
947: to make sure we never malloc an empty one.
948: */
951: PetscErrorCode VecAssemblyBegin_MPI(Vec xin)
952: {
954: PetscInt *owners = xin->map.range,*bowners,i,bs,nstash,reallocs;
955: PetscMPIInt size;
956: InsertMode addv;
957: MPI_Comm comm = xin->comm;
960: if (xin->stash.donotstash) {
961: return(0);
962: }
964: MPI_Allreduce(&xin->stash.insertmode,&addv,1,MPI_INT,MPI_BOR,comm);
965: if (addv == (ADD_VALUES|INSERT_VALUES)) {
966: SETERRQ(PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
967: }
968: xin->stash.insertmode = addv; /* in case this processor had no cache */
969:
970: bs = xin->map.bs;
971: MPI_Comm_size(xin->comm,&size);
972: if (!xin->bstash.bowners && xin->map.bs != -1) {
973: PetscMalloc((size+1)*sizeof(PetscInt),&bowners);
974: for (i=0; i<size+1; i++){ bowners[i] = owners[i]/bs;}
975: xin->bstash.bowners = bowners;
976: } else {
977: bowners = xin->bstash.bowners;
978: }
979: VecStashScatterBegin_Private(&xin->stash,owners);
980: VecStashScatterBegin_Private(&xin->bstash,bowners);
981: VecStashGetInfo_Private(&xin->stash,&nstash,&reallocs);
982: PetscInfo2(0,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
983: VecStashGetInfo_Private(&xin->bstash,&nstash,&reallocs);
984: PetscInfo2(0,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
986: return(0);
987: }
991: PetscErrorCode VecAssemblyEnd_MPI(Vec vec)
992: {
994: PetscInt base,i,j,*row,flg,bs;
995: PetscMPIInt n;
996: PetscScalar *val,*vv,*array,*xarray;
999: if (!vec->stash.donotstash) {
1000: VecGetArray(vec,&xarray);
1001: base = vec->map.range[vec->stash.rank];
1002: bs = vec->map.bs;
1004: /* Process the stash */
1005: while (1) {
1006: VecStashScatterGetMesg_Private(&vec->stash,&n,&row,&val,&flg);
1007: if (!flg) break;
1008: if (vec->stash.insertmode == ADD_VALUES) {
1009: for (i=0; i<n; i++) { xarray[row[i] - base] += val[i]; }
1010: } else if (vec->stash.insertmode == INSERT_VALUES) {
1011: for (i=0; i<n; i++) { xarray[row[i] - base] = val[i]; }
1012: } else {
1013: SETERRQ(PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1014: }
1015: }
1016: VecStashScatterEnd_Private(&vec->stash);
1018: /* now process the block-stash */
1019: while (1) {
1020: VecStashScatterGetMesg_Private(&vec->bstash,&n,&row,&val,&flg);
1021: if (!flg) break;
1022: for (i=0; i<n; i++) {
1023: array = xarray+row[i]*bs-base;
1024: vv = val+i*bs;
1025: if (vec->stash.insertmode == ADD_VALUES) {
1026: for (j=0; j<bs; j++) { array[j] += vv[j];}
1027: } else if (vec->stash.insertmode == INSERT_VALUES) {
1028: for (j=0; j<bs; j++) { array[j] = vv[j]; }
1029: } else {
1030: SETERRQ(PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1031: }
1032: }
1033: }
1034: VecStashScatterEnd_Private(&vec->bstash);
1035: VecRestoreArray(vec,&xarray);
1036: }
1037: vec->stash.insertmode = NOT_SET_VALUES;
1038: return(0);
1039: }