Actual source code: da2.c
1: #define PETSCDM_DLL
2:
3: #include "src/dm/da/daimpl.h"
7: /*@C
8: DAGetElements - Gets an array containing the indices (in local coordinates)
9: of all the local elements
11: Not Collective
13: Input Parameter:
14: . da - the DA object
16: Output Parameters:
17: + n - number of local elements
18: - e - the indices of the elements' vertices
20: Level: intermediate
22: .seealso: DAElementType, DASetElementType(), DARestoreElements()
23: @*/
24: PetscErrorCode DAGetElements(DA da,PetscInt *n,const PetscInt *e[])
25: {
29: (da->ops->getelements)(da,n,e);
30: return(0);
31: }
35: /*@C
36: DARestoreElements - Returns an array containing the indices (in local coordinates)
37: of all the local elements obtained with DAGetElements()
39: Not Collective
41: Input Parameters:
42: + da - the DA object
43: . n - number of local elements
44: - e - the indices of the elements' vertices
46: Level: intermediate
48: .seealso: DAElementType, DASetElementType(), DAGetElements()
49: @*/
50: PetscErrorCode DARestoreElements(DA da,PetscInt *n,const PetscInt *e[])
51: {
55: if (da->ops->restoreelements) {
56: (da->ops->restoreelements)(da,n,e);
57: }
58: return(0);
59: }
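/*
   Usage sketch (an illustration, not part of da2.c): iterating over the local
   P1 triangles returned by DAGetElements(); e[3*i], e[3*i+1], e[3*i+2] are the
   local vertex indices of triangle i. Assumes a 2d DA "da" created elsewhere.

       PetscInt       ne,i,v0,v1,v2;
       const PetscInt *e;
       DAGetElements(da,&ne,&e);
       for (i=0; i<ne; i++) {
         v0 = e[3*i]; v1 = e[3*i+1]; v2 = e[3*i+2];
       }
       DARestoreElements(da,&ne,&e);
*/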
63: PetscErrorCode DAGetOwnershipRange(DA da,PetscInt **lx,PetscInt **ly,PetscInt **lz)
64: {
67: if (lx) *lx = da->lx;
68: if (ly) *ly = da->ly;
69: if (lz) *lz = da->lz;
70: return(0);
71: }
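/*
   Usage sketch (an illustration, not part of da2.c): obtaining the per-process
   ownership sizes computed by DACreate2d(). lx[] has one entry per process
   column and ly[] one per process row; the entries sum to M and N respectively.
   The arrays belong to the DA and must not be freed.

       PetscInt *lx,*ly;
       DAGetOwnershipRange(da,&lx,&ly,PETSC_NULL);
*/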
75: PetscErrorCode DAView_2d(DA da,PetscViewer viewer)
76: {
78: PetscMPIInt rank;
79: PetscTruth iascii,isdraw;
82: MPI_Comm_rank(da->comm,&rank);
84: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
85: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);
86: if (iascii) {
87: PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D N %D m %D n %D w %D s %D\n",rank,da->M,
88: da->N,da->m,da->n,da->w,da->s);
89: PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D, Y range of indices: %D %D\n",da->xs,da->xe,da->ys,da->ye);
90: PetscViewerFlush(viewer);
91: } else if (isdraw) {
92: PetscDraw draw;
93: double ymin = -1*da->s-1,ymax = da->N+da->s;
94: double xmin = -1*da->s-1,xmax = da->M+da->s;
95: double x,y;
96: PetscInt base,*idx;
97: char node[10];
98: PetscTruth isnull;
99:
100: PetscViewerDrawGetDraw(viewer,0,&draw);
101: PetscDrawIsNull(draw,&isnull); if (isnull) return(0);
102: if (!da->coordinates) {
103: PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);
104: }
105: PetscDrawSynchronizedClear(draw);
107: /* first processor draws all the node lines */
108: if (!rank) {
109: ymin = 0.0; ymax = da->N - 1;
110: for (xmin=0; xmin<da->M; xmin++) {
111: PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_BLACK);
112: }
113: xmin = 0.0; xmax = da->M - 1;
114: for (ymin=0; ymin<da->N; ymin++) {
115: PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);
116: }
117: }
118: PetscDrawSynchronizedFlush(draw);
119: PetscDrawPause(draw);
121: /* draw my box */
122: ymin = da->ys; ymax = da->ye - 1; xmin = da->xs/da->w;
123: xmax =(da->xe-1)/da->w;
124: PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);
125: PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);
126: PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);
127: PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);
129: /* put in numbers */
130: base = (da->base)/da->w;
131: for (y=ymin; y<=ymax; y++) {
132: for (x=xmin; x<=xmax; x++) {
133: sprintf(node,"%d",(int)base++);
134: PetscDrawString(draw,x,y,PETSC_DRAW_BLACK,node);
135: }
136: }
138: PetscDrawSynchronizedFlush(draw);
139: PetscDrawPause(draw);
140: /* overlay ghost numbers, useful for error checking */
141: /* put in numbers */
143: base = 0; idx = da->idx;
144: ymin = da->Ys; ymax = da->Ye; xmin = da->Xs; xmax = da->Xe;
145: for (y=ymin; y<ymax; y++) {
146: for (x=xmin; x<xmax; x++) {
147: if ((base % da->w) == 0) {
148: sprintf(node,"%d",(int)(idx[base]/da->w));
149: PetscDrawString(draw,x/da->w,y,PETSC_DRAW_BLUE,node);
150: }
151: base++;
152: }
153: }
154: PetscDrawSynchronizedFlush(draw);
155: PetscDrawPause(draw);
156: } else {
157: SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for DA2d",((PetscObject)viewer)->type_name);
158: }
159: return(0);
160: }
164: PetscErrorCode DAPublish_Petsc(PetscObject obj)
165: {
167: return(0);
168: }
172: PetscErrorCode DAGetElements_2d_P1(DA da,PetscInt *n,const PetscInt *e[])
173: {
175: PetscInt i,j,cnt,xs,xe = da->xe,ys,ye = da->ye,Xs = da->Xs, Xe = da->Xe, Ys = da->Ys;
178: if (!da->e) {
179: if (da->xs == Xs) xs = da->xs; else xs = da->xs - 1;
180: if (da->ys == Ys) ys = da->ys; else ys = da->ys - 1;
181: da->ne = 2*(xe - xs - 1)*(ye - ys - 1);
182: PetscMalloc((1 + 3*da->ne)*sizeof(PetscInt),&da->e);
183: cnt = 0;
184: for (j=ys; j<ye-1; j++) {
185: for (i=xs; i<xe-1; i++) {
186: da->e[cnt] = i - Xs + (j - Ys)*(Xe - Xs);
187: da->e[cnt+1] = i - Xs + 1 + (j - Ys)*(Xe - Xs);
188: da->e[cnt+2] = i - Xs + (j - Ys + 1)*(Xe - Xs);
190: da->e[cnt+3] = i - Xs + 1 + (j - Ys + 1)*(Xe - Xs);
191: da->e[cnt+4] = i - Xs + (j - Ys + 1)*(Xe - Xs);
192: da->e[cnt+5] = i - Xs + 1 + (j - Ys)*(Xe - Xs);
193: cnt += 6;
194: }
195: }
196: }
197: *n = da->ne;
198: *e = da->e;
199: return(0);
200: }
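/*
   A worked instance of the numbering above (illustration only): with a ghosted
   patch Xs=0, Xe=4, Ys=0, the cell whose lower-left corner is (i,j) = (1,2)
   produces the first triangle
       e[cnt]   = 1 + 2*4 = 9    (lower left)
       e[cnt+1] = 2 + 2*4 = 10   (lower right)
       e[cnt+2] = 1 + 3*4 = 13   (upper left)
   i.e. vertex indices into the ghosted local grid of width Xe-Xs = 4.
*/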
205: /*@C
206: DACreate2d - Creates an object that will manage the communication of two-dimensional
207: regular array data that is distributed across some processors.
209: Collective on MPI_Comm
211: Input Parameters:
212: + comm - MPI communicator
213: . wrap - type of periodicity the array should have.
214: Use one of DA_NONPERIODIC, DA_XPERIODIC, DA_YPERIODIC, or DA_XYPERIODIC.
215: . stencil_type - stencil type. Use either DA_STENCIL_BOX or DA_STENCIL_STAR.
216: . M,N - global dimension in each direction of the array (use -M and/or -N to indicate that they may be set to different values
217: from the command line with -da_grid_x <M> -da_grid_y <N>)
218: . m,n - corresponding number of processors in each dimension
219: (or PETSC_DECIDE to have them calculated)
220: . dof - number of degrees of freedom per node
221: . s - stencil width
222: - lx, ly - arrays containing the number of nodes in each cell along
223: the x and y coordinates, or PETSC_NULL. If non-null, these
224: must be of length m and n respectively, and the corresponding
225: m and n cannot be PETSC_DECIDE. The sum of the lx[] entries
226: must be M, and the sum of the ly[] entries must be N.
228: Output Parameter:
229: . inra - the resulting distributed array object
231: Options Database Key:
232: + -da_view - Calls DAView() at the conclusion of DACreate2d()
233: . -da_grid_x <nx> - number of grid points in x direction, if M < 0
234: . -da_grid_y <ny> - number of grid points in y direction, if N < 0
235: . -da_processors_x <nx> - number of processors in x direction
236: . -da_processors_y <ny> - number of processors in y direction
237: . -da_refine_x - refinement ratio in x direction
238: - -da_refine_y - refinement ratio in y direction
240: Level: beginner
242: Notes:
243: The stencil type DA_STENCIL_STAR with width 1 corresponds to the
244: standard 5-pt stencil, while DA_STENCIL_BOX with width 1 denotes
245: the standard 9-pt stencil.
247: The array data itself is NOT stored in the DA; it is stored in Vec objects.
248: The appropriate vector objects can be obtained with calls to DACreateGlobalVector()
249: and DACreateLocalVector(), with calls to VecDuplicate() if more are needed.
251: .keywords: distributed array, create, two-dimensional
253: .seealso: DADestroy(), DAView(), DACreate1d(), DACreate3d(), DAGlobalToLocalBegin(), DAGetRefinementFactor(),
254: DAGlobalToLocalEnd(), DALocalToGlobal(), DALocalToLocalBegin(), DALocalToLocalEnd(), DASetRefinementFactor(),
255: DAGetInfo(), DACreateGlobalVector(), DACreateLocalVector(), DACreateNaturalVector(), DALoad(), DAView()
257: @*/
258: PetscErrorCode DACreate2d(MPI_Comm comm,DAPeriodicType wrap,DAStencilType stencil_type,
259: PetscInt M,PetscInt N,PetscInt m,PetscInt n,PetscInt dof,PetscInt s,PetscInt *lx,PetscInt *ly,DA *inra)
260: {
262: PetscMPIInt rank,size;
263: PetscInt xs,xe,ys,ye,x,y,Xs,Xe,Ys,Ye,start,end;
264: PetscInt up,down,left,i,n0,n1,n2,n3,n5,n6,n7,n8,*idx,nn;
265: PetscInt xbase,*bases,*ldims,j,x_t,y_t,s_t,base,count;
266: PetscInt s_x,s_y; /* stencil width s scaled by w (dof) */
267: PetscInt *flx = 0,*fly = 0;
268: PetscInt sn0 = 0,sn2 = 0,sn6 = 0,sn8 = 0,refine_x = 2, refine_y = 2,tM = M,tN = N;
269: DA da;
270: Vec local,global;
271: VecScatter ltog,gtol;
272: IS to,from;
276: *inra = 0;
277: #ifndef PETSC_USE_DYNAMIC_LIBRARIES
278: DMInitializePackage(PETSC_NULL);
279: #endif
281: if (dof < 1) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Must have 1 or more degrees of freedom per node: %D",dof);
282: if (s < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Stencil width cannot be negative: %D",s);
284: PetscOptionsBegin(comm,PETSC_NULL,"2d DA Options","DA");
285: if (M < 0){
286: tM = -M;
287: PetscOptionsInt("-da_grid_x","Number of grid points in x direction","DACreate2d",tM,&tM,PETSC_NULL);
288: }
289: if (N < 0){
290: tN = -N;
291: PetscOptionsInt("-da_grid_y","Number of grid points in y direction","DACreate2d",tN,&tN,PETSC_NULL);
292: }
293: PetscOptionsInt("-da_processors_x","Number of processors in x direction","DACreate2d",m,&m,PETSC_NULL);
294: PetscOptionsInt("-da_processors_y","Number of processors in y direction","DACreate2d",n,&n,PETSC_NULL);
295: PetscOptionsInt("-da_refine_x","Refinement ratio in x direction","DASetRefinementFactor",refine_x,&refine_x,PETSC_NULL);
296: PetscOptionsInt("-da_refine_y","Refinement ratio in y direction","DASetRefinementFactor",refine_y,&refine_y,PETSC_NULL);
297: PetscOptionsEnd();
298: M = tM; N = tN;
300: PetscHeaderCreate(da,_p_DA,struct _DAOps,DA_COOKIE,0,"DA",comm,DADestroy,DAView);
301: da->bops->publish = DAPublish_Petsc;
302: da->ops->createglobalvector = DACreateGlobalVector;
303: da->ops->getinterpolation = DAGetInterpolation;
304: da->ops->getcoloring = DAGetColoring;
305: da->ops->getmatrix = DAGetMatrix;
306: da->ops->refine = DARefine;
307: da->ops->getinjection = DAGetInjection;
308: da->ops->getelements = DAGetElements_2d_P1;
309: da->elementtype = DA_ELEMENT_P1;
311: PetscLogObjectMemory(da,sizeof(struct _p_DA));
312: da->dim = 2;
313: da->interptype = DA_Q1;
314: da->refine_x = refine_x;
315: da->refine_y = refine_y;
316: PetscMalloc(dof*sizeof(char*),&da->fieldname);
317: PetscMemzero(da->fieldname,dof*sizeof(char*));
319: MPI_Comm_size(comm,&size);
320: MPI_Comm_rank(comm,&rank);
322: if (m != PETSC_DECIDE) {
323: if (m < 1) {SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Non-positive number of processors in X direction: %D",m);}
324: else if (m > size) {SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Too many processors in X direction: %D %d",m,size);}
325: }
326: if (n != PETSC_DECIDE) {
327: if (n < 1) {SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Non-positive number of processors in Y direction: %D",n);}
328: else if (n > size) {SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Too many processors in Y direction: %D %d",n,size);}
329: }
331: if (m == PETSC_DECIDE || n == PETSC_DECIDE) {
332: if (n != PETSC_DECIDE) {
333: m = size/n;
334: } else if (m != PETSC_DECIDE) {
335: n = size/m;
336: } else {
337: /* try for squarish distribution */
338: m = (PetscInt)(0.5 + sqrt(((double)M)*((double)size)/((double)N)));
339: if (!m) m = 1;
340: while (m > 0) {
341: n = size/m;
342: if (m*n == size) break;
343: m--;
344: }
345: if (M > N && m < n) {PetscInt _m = m; m = n; n = _m;}
346: }
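/*
   A worked instance of the squarish-partition heuristic above (illustration
   only): for size=6, M=100, N=50 the initial guess is
       m = (PetscInt)(0.5 + sqrt(100.0*6.0/50.0)) = (PetscInt)3.96 = 3
   and 6/3 = 2 divides evenly, giving m=3, n=2; since M > N and m > n already,
   no swap is performed.
*/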
347: if (m*n != size) SETERRQ(PETSC_ERR_PLIB,"Unable to create partition, check the size of the communicator and input m and n ");
348: } else if (m*n != size) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Given Bad partition");
350: if (M < m) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Partition in x direction is too fine! %D %D",M,m);
351: if (N < n) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Partition in y direction is too fine! %D %D",N,n);
353: /*
354: Determine locally owned region
355: xs is the first local node number, x is the number of local nodes
356: */
357: if (!lx) { /* user did not set distribution, compute default */
358: PetscMalloc(m*sizeof(PetscInt),&lx);
359: flx = lx;
360: for (i=0; i<m; i++) {
361: lx[i] = M/m + ((M % m) > i);
362: }
363: }
364: x = lx[rank % m];
365: xs = 0;
366: for (i=0; i<(rank % m); i++) {
367: xs += lx[i];
368: }
369: #if defined(PETSC_USE_DEBUG)
370: left = xs;
371: for (i=(rank % m); i<m; i++) {
372: left += lx[i];
373: }
374: if (left != M) {
375: SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M: %D %D",left,M);
376: }
377: #endif
379: /*
380: Determine locally owned region
381: ys is the first local node number, y is the number of local nodes
382: */
383: if (!ly) { /* user did not set distribution, compute default */
384: PetscMalloc(n*sizeof(PetscInt),&ly);
385: fly = ly;
386: for (i=0; i<n; i++) {
387: ly[i] = N/n + ((N % n) > i);
388: }
389: }
390: y = ly[rank/m];
391: ys = 0;
392: for (i=0; i<(rank/m); i++) {
393: ys += ly[i];
394: }
395: #if defined(PETSC_USE_DEBUG)
396: left = ys;
397: for (i=(rank/m); i<n; i++) {
398: left += ly[i];
399: }
400: if (left != N) {
401: SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Sum of ly across processors not equal to N: %D %D",left,N);
402: }
403: #endif
405: if (x < s) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Local x-width of domain x %D is smaller than stencil width s %D",x,s);
406: if (y < s) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Local y-width of domain y %D is smaller than stencil width s %D",y,s);
407: xe = xs + x;
408: ye = ys + y;
410: /* determine ghost region */
411: /* Assume No Periodicity */
412: if (xs-s > 0) Xs = xs - s; else Xs = 0;
413: if (ys-s > 0) Ys = ys - s; else Ys = 0;
414: if (xe+s <= M) Xe = xe + s; else Xe = M;
415: if (ye+s <= N) Ye = ye + s; else Ye = N;
417: /* X Periodic */
418: if (DAXPeriodic(wrap)){
419: Xs = xs - s;
420: Xe = xe + s;
421: }
423: /* Y Periodic */
424: if (DAYPeriodic(wrap)){
425: Ys = ys - s;
426: Ye = ye + s;
427: }
429: /* Resize all X parameters to reflect w */
430: x *= dof;
431: xs *= dof;
432: xe *= dof;
433: Xs *= dof;
434: Xe *= dof;
435: s_x = s*dof;
436: s_y = s;
438: /* determine starting point of each processor */
439: nn = x*y;
440: PetscMalloc((2*size+1)*sizeof(PetscInt),&bases);
441: ldims = bases+size+1;
442: MPI_Allgather(&nn,1,MPIU_INT,ldims,1,MPIU_INT,comm);
443: bases[0] = 0;
444: for (i=1; i<=size; i++) {
445: bases[i] = ldims[i-1];
446: }
447: for (i=1; i<=size; i++) {
448: bases[i] += bases[i-1];
449: }
451: /* allocate the base parallel and sequential vectors */
452: da->Nlocal = x*y;
453: VecCreateMPIWithArray(comm,da->Nlocal,PETSC_DECIDE,0,&global);
454: VecSetBlockSize(global,dof);
455: da->nlocal = (Xe-Xs)*(Ye-Ys);
456: VecCreateSeqWithArray(PETSC_COMM_SELF,da->nlocal,0,&local);
457: VecSetBlockSize(local,dof);
460: /* generate appropriate vector scatters */
461: /* local to global inserts non-ghost point region into global */
462: VecGetOwnershipRange(global,&start,&end);
463: ISCreateStride(comm,x*y,start,1,&to);
465: left = xs - Xs; down = ys - Ys; up = down + y;
466: PetscMalloc(x*(up - down)*sizeof(PetscInt),&idx);
467: count = 0;
468: for (i=down; i<up; i++) {
469: for (j=0; j<x/dof; j++) {
470: idx[count++] = left + i*(Xe-Xs) + j*dof;
471: }
472: }
473: ISCreateBlock(comm,dof,count,idx,&from);
474: PetscFree(idx);
476: VecScatterCreate(local,from,global,to,&ltog);
477: PetscLogObjectParent(da,to);
478: PetscLogObjectParent(da,from);
479: PetscLogObjectParent(da,ltog);
480: ISDestroy(from);
481: ISDestroy(to);
483: /* global to local must include ghost points */
484: if (stencil_type == DA_STENCIL_BOX) {
485: ISCreateStride(comm,(Xe-Xs)*(Ye-Ys),0,1,&to);
486: } else {
487: /* must drop into cross shape region */
488: /*       ---------
489:         |  top    |
490:      |---       ---|
491:      |   middle    |
492:      |             |
493:       ---       ---
494:         | bottom  |
495:          ---------
496:      Xs  xs      xe  Xe */
497: /* bottom */
498: left = xs - Xs; down = ys - Ys; up = down + y;
499: count = down*(xe-xs) + (up-down)*(Xe-Xs) + (Ye-Ys-up)*(xe-xs);
500: PetscMalloc(count*sizeof(PetscInt)/dof,&idx);
501: count = 0;
502: for (i=0; i<down; i++) {
503: for (j=0; j<xe-xs; j += dof) {
504: idx[count++] = left + i*(Xe-Xs) + j;
505: }
506: }
507: /* middle */
508: for (i=down; i<up; i++) {
509: for (j=0; j<Xe-Xs; j += dof) {
510: idx[count++] = i*(Xe-Xs) + j;
511: }
512: }
513: /* top */
514: for (i=up; i<Ye-Ys; i++) {
515: for (j=0; j<xe-xs; j += dof) {
516: idx[count++] = left + i*(Xe-Xs) + j;
517: }
518: }
519: ISCreateBlock(comm,dof,count,idx,&to);
520: PetscFree(idx);
521: }
524: /* determine who lies on each side of us, stored in   n6 n7 n8
525:                                                       n3    n5
526:                                                       n0 n1 n2
527: */
529: /* Assume the Non-Periodic Case */
530: n1 = rank - m;
531: if (rank % m) {
532: n0 = n1 - 1;
533: } else {
534: n0 = -1;
535: }
536: if ((rank+1) % m) {
537: n2 = n1 + 1;
538: n5 = rank + 1;
539: n8 = rank + m + 1; if (n8 >= m*n) n8 = -1;
540: } else {
541: n2 = -1; n5 = -1; n8 = -1;
542: }
543: if (rank % m) {
544: n3 = rank - 1;
545: n6 = n3 + m; if (n6 >= m*n) n6 = -1;
546: } else {
547: n3 = -1; n6 = -1;
548: }
549: n7 = rank + m; if (n7 >= m*n) n7 = -1;
552: /* Modify for Periodic Cases */
553: if (wrap == DA_YPERIODIC) { /* Handle Top and Bottom Sides */
554: if (n1 < 0) n1 = rank + m * (n-1);
555: if (n7 < 0) n7 = rank - m * (n-1);
556: if ((n3 >= 0) && (n0 < 0)) n0 = size - m + rank - 1;
557: if ((n3 >= 0) && (n6 < 0)) n6 = (rank%m)-1;
558: if ((n5 >= 0) && (n2 < 0)) n2 = size - m + rank + 1;
559: if ((n5 >= 0) && (n8 < 0)) n8 = (rank%m)+1;
560: } else if (wrap == DA_XPERIODIC) { /* Handle Left and Right Sides */
561: if (n3 < 0) n3 = rank + (m-1);
562: if (n5 < 0) n5 = rank - (m-1);
563: if ((n1 >= 0) && (n0 < 0)) n0 = rank-1;
564: if ((n1 >= 0) && (n2 < 0)) n2 = rank-2*m+1;
565: if ((n7 >= 0) && (n6 < 0)) n6 = rank+2*m-1;
566: if ((n7 >= 0) && (n8 < 0)) n8 = rank+1;
567: } else if (wrap == DA_XYPERIODIC) {
569: /* Handle all four corners */
570: if ((n6 < 0) && (n7 < 0) && (n3 < 0)) n6 = m-1;
571: if ((n8 < 0) && (n7 < 0) && (n5 < 0)) n8 = 0;
572: if ((n2 < 0) && (n5 < 0) && (n1 < 0)) n2 = size-m;
573: if ((n0 < 0) && (n3 < 0) && (n1 < 0)) n0 = size-1;
575: /* Handle Top and Bottom Sides */
576: if (n1 < 0) n1 = rank + m * (n-1);
577: if (n7 < 0) n7 = rank - m * (n-1);
578: if ((n3 >= 0) && (n0 < 0)) n0 = size - m + rank - 1;
579: if ((n3 >= 0) && (n6 < 0)) n6 = (rank%m)-1;
580: if ((n5 >= 0) && (n2 < 0)) n2 = size - m + rank + 1;
581: if ((n5 >= 0) && (n8 < 0)) n8 = (rank%m)+1;
583: /* Handle Left and Right Sides */
584: if (n3 < 0) n3 = rank + (m-1);
585: if (n5 < 0) n5 = rank - (m-1);
586: if ((n1 >= 0) && (n0 < 0)) n0 = rank-1;
587: if ((n1 >= 0) && (n2 < 0)) n2 = rank-2*m+1;
588: if ((n7 >= 0) && (n6 < 0)) n6 = rank+2*m-1;
589: if ((n7 >= 0) && (n8 < 0)) n8 = rank+1;
590: }
592: if (stencil_type == DA_STENCIL_STAR) {
593: /* save corner processor numbers */
594: sn0 = n0; sn2 = n2; sn6 = n6; sn8 = n8;
595: n0 = n2 = n6 = n8 = -1;
596: }
598: PetscMalloc((x+2*s_x)*(y+2*s_y)*sizeof(PetscInt),&idx);
599: PetscLogObjectMemory(da,(x+2*s_x)*(y+2*s_y)*sizeof(PetscInt));
600: nn = 0;
602: xbase = bases[rank];
603: for (i=1; i<=s_y; i++) {
604: if (n0 >= 0) { /* left below */
605: x_t = lx[n0 % m]*dof;
606: y_t = ly[(n0/m)];
607: s_t = bases[n0] + x_t*y_t - (s_y-i)*x_t - s_x;
608: for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
609: }
610: if (n1 >= 0) { /* directly below */
611: x_t = x;
612: y_t = ly[(n1/m)];
613: s_t = bases[n1] + x_t*y_t - (s_y+1-i)*x_t;
614: for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
615: }
616: if (n2 >= 0) { /* right below */
617: x_t = lx[n2 % m]*dof;
618: y_t = ly[(n2/m)];
619: s_t = bases[n2] + x_t*y_t - (s_y+1-i)*x_t;
620: for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
621: }
622: }
624: for (i=0; i<y; i++) {
625: if (n3 >= 0) { /* directly left */
626: x_t = lx[n3 % m]*dof;
627: /* y_t = y; */
628: s_t = bases[n3] + (i+1)*x_t - s_x;
629: for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
630: }
632: for (j=0; j<x; j++) { idx[nn++] = xbase++; } /* interior */
634: if (n5 >= 0) { /* directly right */
635: x_t = lx[n5 % m]*dof;
636: /* y_t = y; */
637: s_t = bases[n5] + (i)*x_t;
638: for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
639: }
640: }
642: for (i=1; i<=s_y; i++) {
643: if (n6 >= 0) { /* left above */
644: x_t = lx[n6 % m]*dof;
645: /* y_t = ly[(n6/m)]; */
646: s_t = bases[n6] + (i)*x_t - s_x;
647: for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
648: }
649: if (n7 >= 0) { /* directly above */
650: x_t = x;
651: /* y_t = ly[(n7/m)]; */
652: s_t = bases[n7] + (i-1)*x_t;
653: for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
654: }
655: if (n8 >= 0) { /* right above */
656: x_t = lx[n8 % m]*dof;
657: /* y_t = ly[(n8/m)]; */
658: s_t = bases[n8] + (i-1)*x_t;
659: for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
660: }
661: }
663: base = bases[rank];
664: {
665: PetscInt nnn = nn/dof,*iidx;
666: PetscMalloc(nnn*sizeof(PetscInt),&iidx);
667: for (i=0; i<nnn; i++) {
668: iidx[i] = idx[dof*i];
669: }
670: ISCreateBlock(comm,dof,nnn,iidx,&from);
671: PetscFree(iidx);
672: }
673: VecScatterCreate(global,from,local,to,&gtol);
674: PetscLogObjectParent(da,to);
675: PetscLogObjectParent(da,from);
676: PetscLogObjectParent(da,gtol);
677: ISDestroy(to);
678: ISDestroy(from);
680: if (stencil_type == DA_STENCIL_STAR) {
681: /*
682: Recompute the local to global mappings, this time keeping the
683: information about the cross corner processor numbers.
684: */
685: n0 = sn0; n2 = sn2; n6 = sn6; n8 = sn8;
686: nn = 0;
687: xbase = bases[rank];
688: for (i=1; i<=s_y; i++) {
689: if (n0 >= 0) { /* left below */
690: x_t = lx[n0 % m]*dof;
691: y_t = ly[(n0/m)];
692: s_t = bases[n0] + x_t*y_t - (s_y-i)*x_t - s_x;
693: for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
694: }
695: if (n1 >= 0) { /* directly below */
696: x_t = x;
697: y_t = ly[(n1/m)];
698: s_t = bases[n1] + x_t*y_t - (s_y+1-i)*x_t;
699: for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
700: }
701: if (n2 >= 0) { /* right below */
702: x_t = lx[n2 % m]*dof;
703: y_t = ly[(n2/m)];
704: s_t = bases[n2] + x_t*y_t - (s_y+1-i)*x_t;
705: for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
706: }
707: }
709: for (i=0; i<y; i++) {
710: if (n3 >= 0) { /* directly left */
711: x_t = lx[n3 % m]*dof;
712: /* y_t = y; */
713: s_t = bases[n3] + (i+1)*x_t - s_x;
714: for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
715: }
717: for (j=0; j<x; j++) { idx[nn++] = xbase++; } /* interior */
719: if (n5 >= 0) { /* directly right */
720: x_t = lx[n5 % m]*dof;
721: /* y_t = y; */
722: s_t = bases[n5] + (i)*x_t;
723: for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
724: }
725: }
727: for (i=1; i<=s_y; i++) {
728: if (n6 >= 0) { /* left above */
729: x_t = lx[n6 % m]*dof;
730: /* y_t = ly[(n6/m)]; */
731: s_t = bases[n6] + (i)*x_t - s_x;
732: for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
733: }
734: if (n7 >= 0) { /* directly above */
735: x_t = x;
736: /* y_t = ly[(n7/m)]; */
737: s_t = bases[n7] + (i-1)*x_t;
738: for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
739: }
740: if (n8 >= 0) { /* right above */
741: x_t = lx[n8 % m]*dof;
742: /* y_t = ly[(n8/m)]; */
743: s_t = bases[n8] + (i-1)*x_t;
744: for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
745: }
746: }
747: }
748: PetscFree(bases);
750: da->M = M; da->N = N; da->m = m; da->n = n; da->w = dof; da->s = s;
751: da->xs = xs; da->xe = xe; da->ys = ys; da->ye = ye; da->zs = 0; da->ze = 1;
752: da->Xs = Xs; da->Xe = Xe; da->Ys = Ys; da->Ye = Ye; da->Zs = 0; da->Ze = 1;
753: da->P = 1; da->p = 1;
755: VecDestroy(local);
756: VecDestroy(global);
758: da->gtol = gtol;
759: da->ltog = ltog;
760: da->idx = idx;
761: da->Nl = nn;
762: da->base = base;
763: da->wrap = wrap;
764: da->ops->view = DAView_2d;
765: da->stencil_type = stencil_type;
767: /*
768: Set the local to global ordering in the global vector; this allows use
769: of VecSetValuesLocal().
770: */
771: ISLocalToGlobalMappingCreateNC(comm,nn,idx,&da->ltogmap);
772: ISLocalToGlobalMappingBlock(da->ltogmap,da->w,&da->ltogmapb);
773: PetscLogObjectParent(da,da->ltogmap);
775: *inra = da;
777: da->ltol = PETSC_NULL;
778: da->ao = PETSC_NULL;
781: if (!flx) {
782: PetscMalloc(m*sizeof(PetscInt),&flx);
783: PetscMemcpy(flx,lx,m*sizeof(PetscInt));
784: }
785: if (!fly) {
786: PetscMalloc(n*sizeof(PetscInt),&fly);
787: PetscMemcpy(fly,ly,n*sizeof(PetscInt));
788: }
789: da->lx = flx;
790: da->ly = fly;
791: DAView_Private(da);
792: PetscPublishAll(da);
793: return(0);
794: }
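/*
   Usage sketch (an illustration, not part of da2.c): a 2d DA for a scalar
   field on a 128 x 128 grid, star stencil of width 1, with PETSc choosing
   the process layout and the ownership ranges.

       DA da;
       DACreate2d(PETSC_COMM_WORLD,DA_NONPERIODIC,DA_STENCIL_STAR,
                  128,128,PETSC_DECIDE,PETSC_DECIDE,1,1,PETSC_NULL,PETSC_NULL,&da);
       ...
       DADestroy(da);
*/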
798: /*@
799: DARefine - Creates a new distributed array that is a refinement of a given
800: distributed array.
802: Collective on DA
804: Input Parameters:
805: + da - initial distributed array
806: - comm - communicator to contain the refined DA; must either be the same as the da communicator or
807: include it and be 2, 4, or 8 times larger. Currently ignored
809: Output Parameter:
810: . daref - refined distributed array
812: Level: advanced
814: Note:
815: Currently, refinement consists of multiplying the number of grid spaces
816: in each dimension of the DA by the refinement factor (2 by default).
818: .keywords: distributed array, refine
820: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy()
821: @*/
822: PetscErrorCode DARefine(DA da,MPI_Comm comm,DA *daref)
823: {
825: PetscInt M,N,P;
826: DA da2;
832: if (DAXPeriodic(da->wrap) || da->interptype == DA_Q0){
833: M = da->refine_x*da->M;
834: } else {
835: M = 1 + da->refine_x*(da->M - 1);
836: }
837: if (DAYPeriodic(da->wrap) || da->interptype == DA_Q0){
838: N = da->refine_y*da->N;
839: } else {
840: N = 1 + da->refine_y*(da->N - 1);
841: }
842: if (DAZPeriodic(da->wrap) || da->interptype == DA_Q0){
843: P = da->refine_z*da->P;
844: } else {
845: P = 1 + da->refine_z*(da->P - 1);
846: }
847: DACreate(da->comm,da->dim,da->wrap,da->stencil_type,M,N,P,da->m,da->n,da->p,da->w,da->s,0,0,0,&da2);
849: /* allow overloaded (user replaced) operations to be inherited by refinement clones */
850: da2->ops->getmatrix = da->ops->getmatrix;
851: da2->ops->getinterpolation = da->ops->getinterpolation;
852: da2->ops->getcoloring = da->ops->getcoloring;
853: da2->interptype = da->interptype;
854:
855: /* copy fill information if given */
856: if (da->dfill) {
857: PetscMalloc((da->dfill[da->w]+da->w+1)*sizeof(PetscInt),&da2->dfill);
858: PetscMemcpy(da2->dfill,da->dfill,(da->dfill[da->w]+da->w+1)*sizeof(PetscInt));
859: }
860: if (da->ofill) {
861: PetscMalloc((da->ofill[da->w]+da->w+1)*sizeof(PetscInt),&da2->ofill);
862: PetscMemcpy(da2->ofill,da->ofill,(da->ofill[da->w]+da->w+1)*sizeof(PetscInt));
863: }
864: /* copy the refine information */
865: da2->refine_x = da->refine_x;
866: da2->refine_y = da->refine_y;
867: da2->refine_z = da->refine_z;
868: *daref = da2;
869: return(0);
870: }
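/*
   Usage sketch (an illustration, not part of da2.c; the names "coarse" and
   "fine" are hypothetical): refining a coarse DA. With the default refinement
   factor of 2, DA_Q1 interpolation and no periodicity, a 33 x 33 grid becomes
   1 + 2*(33-1) = 65 points in each direction.

       DA fine;
       DARefine(coarse,PETSC_COMM_WORLD,&fine);
*/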
872: /*@C
873: DASetRefinementFactor - Sets the ratios by which the DA grid is refined
875: Collective on DA
877: Input Parameters:
878: + da - the DA object
879: . refine_x - ratio of fine grid to coarse in x direction (2 by default)
880: . refine_y - ratio of fine grid to coarse in y direction (2 by default)
881: - refine_z - ratio of fine grid to coarse in z direction (2 by default)
883: Options Database:
884: + -da_refine_x - refinement ratio in x direction
885: . -da_refine_y - refinement ratio in y direction
886: - -da_refine_z - refinement ratio in z direction
888: Level: intermediate
890: Notes: Pass PETSC_IGNORE to leave a value unchanged
892: .seealso: DARefine(), DAGetRefinementFactor()
893: @*/
894: PetscErrorCode DASetRefinementFactor(DA da, PetscInt refine_x, PetscInt refine_y,PetscInt refine_z)
895: {
897: if (refine_x > 0) da->refine_x = refine_x;
898: if (refine_y > 0) da->refine_y = refine_y;
899: if (refine_z > 0) da->refine_z = refine_z;
900: return(0);
901: }
903: /*@C
904: DAGetRefinementFactor - Gets the ratios by which the DA grid is refined
906: Not Collective
908: Input Parameter:
909: . da - the DA object
911: Output Parameters:
912: + refine_x - ratio of fine grid to coarse in x direction (2 by default)
913: . refine_y - ratio of fine grid to coarse in y direction (2 by default)
914: - refine_z - ratio of fine grid to coarse in z direction (2 by default)
916: Level: intermediate
918: Notes: Pass PETSC_NULL for values you do not need
920: .seealso: DARefine(), DASetRefinementFactor()
921: @*/
922: PetscErrorCode DAGetRefinementFactor(DA da, PetscInt *refine_x, PetscInt *refine_y,PetscInt *refine_z)
923: {
925: if (refine_x) *refine_x = da->refine_x;
926: if (refine_y) *refine_y = da->refine_y;
927: if (refine_z) *refine_z = da->refine_z;
928: return(0);
929: }
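/*
   Usage sketch (an illustration, not part of da2.c): refine 4x in the x
   direction only, then read all three factors back. Per the Notes above,
   PETSC_IGNORE leaves a factor unchanged.

       PetscInt rx,ry,rz;
       DASetRefinementFactor(da,4,PETSC_IGNORE,PETSC_IGNORE);
       DAGetRefinementFactor(da,&rx,&ry,&rz);
*/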
931: /*@C
932: DASetGetMatrix - Sets the routine used by the DA to allocate a matrix.
934: Collective on DA
936: Input Parameters:
937: + da - the DA object
938: - f - the function that allocates the matrix for that specific DA
940: Level: developer
942: Notes: See DASetBlockFills(), which provides a simple way to specify the nonzero structure for
943: the diagonal and off-diagonal blocks of the matrix
945: .seealso: DAGetMatrix(), DASetBlockFills()
946: @*/
947: PetscErrorCode DASetGetMatrix(DA da,PetscErrorCode (*f)(DA, MatType,Mat*))
948: {
950: da->ops->getmatrix = f;
951: return(0);
952: }
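/*
   Usage sketch (an illustration, not part of da2.c): supplying a custom
   allocator so DAGetMatrix() returns a user-preallocated matrix.
   "MyGetMatrix" is a hypothetical routine; a real one would also set the
   sizes, type and preallocation before returning.

       PetscErrorCode MyGetMatrix(DA da,MatType mtype,Mat *J)
       {
         MatCreate(da->comm,J);
         return(0);
       }

       DASetGetMatrix(da,MyGetMatrix);
*/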
954: /*
955: M is number of grid points
956: m is number of processors
958: */
961: PetscErrorCode DASplitComm2d(MPI_Comm comm,PetscInt M,PetscInt N,PetscInt sw,MPI_Comm *outcomm)
962: {
964: PetscInt m,n = 0,x = 0,y = 0;
965: PetscMPIInt size,csize,rank;
968: MPI_Comm_size(comm,&size);
969: MPI_Comm_rank(comm,&rank);
971: csize = 4*size;
972: do {
973: if (csize % 4) SETERRQ4(PETSC_ERR_ARG_INCOMP,"Cannot split communicator of size %d tried %d %D %D",size,csize,x,y);
974: csize = csize/4;
975:
976: m = (PetscInt)(0.5 + sqrt(((double)M)*((double)csize)/((double)N)));
977: if (!m) m = 1;
978: while (m > 0) {
979: n = csize/m;
980: if (m*n == csize) break;
981: m--;
982: }
983: if (M > N && m < n) {PetscInt _m = m; m = n; n = _m;}
985: x = M/m + ((M % m) > ((csize-1) % m));
986: y = (N + (csize-1)/m)/n;
987: } while ((x < 4 || y < 4) && csize > 1);
988: if (size != csize) {
989: MPI_Group entire_group,sub_group;
990: PetscMPIInt i,*groupies;
992: MPI_Comm_group(comm,&entire_group);
993: PetscMalloc(csize*sizeof(PetscInt),&groupies);
994: for (i=0; i<csize; i++) {
995: groupies[i] = (rank/csize)*csize + i;
996: }
997: MPI_Group_incl(entire_group,csize,groupies,&sub_group);
998: PetscFree(groupies);
999: MPI_Comm_create(comm,sub_group,outcomm);
1000: MPI_Group_free(&entire_group);
1001: MPI_Group_free(&sub_group);
1002: PetscInfo1(0,"DASplitComm2d:Creating redundant coarse problems of size %d\n",csize);
1003: } else {
1004: *outcomm = comm;
1005: }
1006: return(0);
1007: }
1011: /*@C
1012: DASetLocalFunction - Caches in a DA a local function.
1014: Collective on DA
1016: Input Parameters:
1017: + da - initial distributed array
1018: - lf - the local function
1020: Level: intermediate
1022: Notes: The routine SNESDAFormFunction() uses this cached function to evaluate the user-provided function.
1024: .keywords: distributed array, refine
1026: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunctioni()
1027: @*/
1028: PetscErrorCode DASetLocalFunction(DA da,DALocalFunction1 lf)
1029: {
1032: da->lf = lf;
1033: return(0);
1034: }
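/*
   Usage sketch (an illustration, not part of da2.c): a local residual for a
   scalar 2d problem, cached in the DA. "MyLocalFunction" is hypothetical;
   x is the ghosted input array and f the non-ghosted output, both indexed
   [j][i]. Boundary handling is omitted, so the 5-point stencil below is only
   valid away from a non-periodic boundary.

       PetscErrorCode MyLocalFunction(DALocalInfo *info,PetscScalar **x,
                                      PetscScalar **f,void *ctx)
       {
         PetscInt i,j;
         for (j=info->ys; j<info->ys+info->ym; j++) {
           for (i=info->xs; i<info->xs+info->xm; i++) {
             f[j][i] = 4.0*x[j][i] - x[j][i-1] - x[j][i+1] - x[j-1][i] - x[j+1][i];
           }
         }
         return(0);
       }

       DASetLocalFunction(da,(DALocalFunction1)MyLocalFunction);
*/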
1038: /*@C
1039: DASetLocalFunctioni - Caches in a DA a local function that evaluates a single component
1041: Collective on DA
1043: Input Parameters:
1044: + da - initial distributed array
1045: - lfi - the local function
1047: Level: intermediate
1049: .keywords: distributed array, refine
1051: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunction()
1052: @*/
1053: PetscErrorCode DASetLocalFunctioni(DA da,PetscErrorCode (*lfi)(DALocalInfo*,MatStencil*,void*,PetscScalar*,void*))
1054: {
1057: da->lfi = lfi;
1058: return(0);
1059: }
1063: /*@C
1064: DASetLocalFunctionib - Caches in a DA a block local function that evaluates a single component
1066: Collective on DA
1068: Input Parameters:
1069: + da - initial distributed array
1070: - lfi - the local function
1072: Level: intermediate
1074: .keywords: distributed array, refine
1076: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunction()
1077: @*/
1078: PetscErrorCode DASetLocalFunctionib(DA da,PetscErrorCode (*lfi)(DALocalInfo*,MatStencil*,void*,PetscScalar*,void*))
1079: {
1082: da->lfib = lfi;
1083: return(0);
1084: }
1088: PetscErrorCode DASetLocalAdicFunction_Private(DA da,DALocalFunction1 ad_lf)
1089: {
1092: da->adic_lf = ad_lf;
1093: return(0);
1094: }
1096: /*MC
1097: DASetLocalAdicFunctioni - Caches in a DA a local functioni computed by ADIC/ADIFOR
1099: Collective on DA
1101: Synopsis:
1102: PetscErrorCode DASetLocalAdicFunctioni(DA da,PetscErrorCode (*ad_lfi)(DALocalInfo*,MatStencil*,void*,void*,void*))
1103:
1104: Input Parameters:
1105: + da - initial distributed array
1106: - ad_lfi - the local function as computed by ADIC/ADIFOR
1108: Level: intermediate
1110: .keywords: distributed array, refine
1112: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunction(),
1113: DASetLocalJacobian(), DASetLocalFunctioni()
1114: M*/
1118: PetscErrorCode DASetLocalAdicFunctioni_Private(DA da,PetscErrorCode (*ad_lfi)(DALocalInfo*,MatStencil*,void*,void*,void*))
1119: {
1122: da->adic_lfi = ad_lfi;
1123: return(0);
1124: }
1126: /*MC
1127: DASetLocalAdicMFFunctioni - Caches in a DA a local functioni computed by ADIC/ADIFOR
1129: Collective on DA
1131: Synopsis:
1132: PetscErrorCode DASetLocalAdicMFFunctioni(DA da,PetscErrorCode (*admf_lfi)(DALocalInfo*,MatStencil*,void*,void*,void*))
1133:
1134: Input Parameters:
1135: + da - initial distributed array
1136: - admf_lfi - the local matrix-free function as computed by ADIC/ADIFOR
1138: Level: intermediate
1140: .keywords: distributed array, refine
1142: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunction(),
1143: DASetLocalJacobian(), DASetLocalFunctioni()
1144: M*/
1148: PetscErrorCode DASetLocalAdicMFFunctioni_Private(DA da,PetscErrorCode (*admf_lfi)(DALocalInfo*,MatStencil*,void*,void*,void*))
1149: {
1152: da->adicmf_lfi = admf_lfi;
1153: return(0);
1154: }
1156: /*MC
1157: DASetLocalAdicFunctionib - Caches in a DA a block local functioni computed by ADIC/ADIFOR
1159: Collective on DA
1161: Synopsis:
1162: PetscErrorCode DASetLocalAdicFunctionib(DA da,PetscErrorCode (*ad_lfi)(DALocalInfo*,MatStencil*,void*,void*,void*))
1163:
1164: Input Parameters:
1165: + da - initial distributed array
1166: - ad_lfi - the local function as computed by ADIC/ADIFOR
1168: Level: intermediate
1170: .keywords: distributed array, refine
1172: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunction(),
1173: DASetLocalJacobian(), DASetLocalFunctionib()
1174: M*/
1178: PetscErrorCode DASetLocalAdicFunctionib_Private(DA da,PetscErrorCode (*ad_lfi)(DALocalInfo*,MatStencil*,void*,void*,void*))
1179: {
1182: da->adic_lfib = ad_lfi;
1183: return(0);
1184: }
1186: /*MC
1187: DASetLocalAdicMFFunctionib - Caches in a DA a block local functioni computed by ADIC/ADIFOR
1189: Collective on DA
1191: Synopsis:
1192: PetscErrorCode DASetLocalAdicMFFunctionib(DA da,PetscErrorCode (*admf_lfi)(DALocalInfo*,MatStencil*,void*,void*,void*))
1193:
1194: Input Parameters:
1195: + da - initial distributed array
1196: - admf_lfi - the local matrix-free function as computed by ADIC/ADIFOR
1198: Level: intermediate
1200: .keywords: distributed array, refine
1202: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunction(),
1203: DASetLocalJacobian(), DASetLocalFunctionib()
1204: M*/
1208: PetscErrorCode DASetLocalAdicMFFunctionib_Private(DA da,PetscErrorCode (*admf_lfi)(DALocalInfo*,MatStencil*,void*,void*,void*))
1209: {
1212: da->adicmf_lfib = admf_lfi;
1213: return(0);
1214: }
1216: /*MC
1217: DASetLocalAdicMFFunction - Caches in a DA a local function computed by ADIC/ADIFOR
1219: Collective on DA
1221: Synopsis:
1222: PetscErrorCode DASetLocalAdicMFFunction(DA da,DALocalFunction1 ad_lf)
1223:
1224: Input Parameters:
1225: + da - initial distributed array
1226: - ad_lf - the local function as computed by ADIC/ADIFOR
1228: Level: intermediate
1230: .keywords: distributed array, refine
1232: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunction(),
1233: DASetLocalJacobian()
1234: M*/
1238: PetscErrorCode DASetLocalAdicMFFunction_Private(DA da,DALocalFunction1 ad_lf)
1239: {
1242: da->adicmf_lf = ad_lf;
1243: return(0);
1244: }
1246: /*@C
1247: DASetLocalJacobian - Caches in a DA a local Jacobian
1249: Collective on DA
1251:
1252: Input Parameter:
1253: + da - initial distributed array
1254: - lj - the local Jacobian
1256: Level: intermediate
1258: Notes: The routine SNESDAFormFunction() uses this cached function to evaluate the user-provided function.
1260: .keywords: distributed array, refine
1262: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunction()
1263: @*/
1266: PetscErrorCode DASetLocalJacobian(DA da,DALocalFunction1 lj)
1267: {
1270: da->lj = lj;
1271: return(0);
1272: }
1276: /*@C
1277: DAGetLocalFunction - Gets from a DA a local function and its ADIC/ADIFOR Jacobian
1279: Collective on DA
1281: Input Parameter:
1282: . da - initial distributed array
1284: Output Parameter:
1285: . lf - the local function
1287: Level: intermediate
1289: .keywords: distributed array, refine
1291: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DASetLocalFunction()
1292: @*/
1293: PetscErrorCode DAGetLocalFunction(DA da,DALocalFunction1 *lf)
1294: {
1297: if (lf) *lf = da->lf;
1298: return(0);
1299: }
1303: /*@
1304: DAFormFunction - Evaluates a user-provided function on each processor that
1305: shares a DA
1307: Input Parameters:
1308: + da - the DA that defines the grid
1309: . lf - the local function
1310: . vu - input vector
1311: . vfu - output vector
1312: - w - any user data
1313: Notes: Does NOT do ghost updates on vu upon entry
1315: This should eventually replace DAFormFunction1
1317: Level: advanced
1319: .seealso: DAComputeJacobian1WithAdic()
1321: @*/
1322: PetscErrorCode DAFormFunction(DA da,PetscErrorCode (*lf)(void),Vec vu,Vec vfu,void *w)
1323: {
1325: void *u,*fu;
1326: DALocalInfo info;
1327: PetscErrorCode (*f)(DALocalInfo*,void*,void*,void*) = (PetscErrorCode (*)(DALocalInfo*,void*,void*,void*))lf;
1328:
1330: DAGetLocalInfo(da,&info);
1331: DAVecGetArray(da,vu,&u);
1332: DAVecGetArray(da,vfu,&fu);
1334: (*f)(&info,u,fu,w);
1335: if (PetscExceptionValue(ierr)) {
1336: PetscErrorCode pierr = DAVecRestoreArray(da,vu,&u);CHKERRQ(pierr);
1337: pierr = DAVecRestoreArray(da,vfu,&fu);CHKERRQ(pierr);
1338: }
1339:
1341: DAVecRestoreArray(da,vu,&u);
1342: DAVecRestoreArray(da,vfu,&fu);
1343: return(0);
1344: }
1348: /*@C
1349: DAFormFunctionLocal - This is a universal function evaluation routine for
1350: a local DA function.
1352: Collective on DA
1354: Input Parameters:
1355: + da - the DA context
1356: . func - The local function
1357: . X - input vector
1358: . F - function vector
1359: - ctx - A user context
1361: Level: intermediate
1363: .seealso: DASetLocalFunction(), DASetLocalJacobian(), DASetLocalAdicFunction(), DASetLocalAdicMFFunction(),
1364: SNESSetFunction(), SNESSetJacobian()
1366: @*/
1367: PetscErrorCode DAFormFunctionLocal(DA da, DALocalFunction1 func, Vec X, Vec F, void *ctx)
1368: {
1369: Vec localX;
1370: DALocalInfo info;
1371: void *u;
1372: void *fu;
1376: DAGetLocalVector(da,&localX);
1377: /*
1378: Scatter ghost points to local vector, using the 2-step process
1379: DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
1380: */
1381: DAGlobalToLocalBegin(da,X,INSERT_VALUES,localX);
1382: DAGlobalToLocalEnd(da,X,INSERT_VALUES,localX);
1383: DAGetLocalInfo(da,&info);
1384: DAVecGetArray(da,localX,&u);
1385: DAVecGetArray(da,F,&fu);
1386: (*func)(&info,u,fu,ctx);
1387: if (PetscExceptionValue(ierr)) {
1388: PetscErrorCode pierr = DAVecRestoreArray(da,localX,&u);CHKERRQ(pierr);
1389: pierr = DAVecRestoreArray(da,F,&fu);CHKERRQ(pierr);
1390: }
1391:
1392: DAVecRestoreArray(da,localX,&u);
1393: DAVecRestoreArray(da,F,&fu);
1394: if (PetscExceptionValue(ierr)) {
1395: PetscErrorCode pierr = DARestoreLocalVector(da,&localX);CHKERRQ(pierr);
1396: }
1397:
1398: DARestoreLocalVector(da,&localX);
1399: return(0);
1400: }
1404: /*@C
1405: DAFormFunctionLocalGhost - This is a universal function evaluation routine for
1406: a local DA function, but the ghost values of the output are communicated and added.
1408: Collective on DA
1410: Input Parameters:
1411: + da - the DA context
1412: . func - The local function
1413: . X - input vector
1414: . F - function vector
1415: - ctx - A user context
1417: Level: intermediate
1419: .seealso: DASetLocalFunction(), DASetLocalJacobian(), DASetLocalAdicFunction(), DASetLocalAdicMFFunction(),
1420: SNESSetFunction(), SNESSetJacobian()
1422: @*/
1423: PetscErrorCode DAFormFunctionLocalGhost(DA da, DALocalFunction1 func, Vec X, Vec F, void *ctx)
1424: {
1425: Vec localX, localF;
1426: DALocalInfo info;
1427: void *u;
1428: void *fu;
1432: DAGetLocalVector(da,&localX);
1433: DAGetLocalVector(da,&localF);
1434: /*
1435: Scatter ghost points to local vector, using the 2-step process
1436: DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
1437: */
1438: DAGlobalToLocalBegin(da,X,INSERT_VALUES,localX);
1439: DAGlobalToLocalEnd(da,X,INSERT_VALUES,localX);
1440: VecSet(F, 0.0);
1441: VecSet(localF, 0.0);
1442: DAGetLocalInfo(da,&info);
1443: DAVecGetArray(da,localX,&u);
1444: DAVecGetArray(da,localF,&fu);
1445: (*func)(&info,u,fu,ctx);
1446: if (PetscExceptionValue(ierr)) {
1447: PetscErrorCode pierr = DAVecRestoreArray(da,localX,&u);CHKERRQ(pierr);
1448: pierr = DAVecRestoreArray(da,localF,&fu);CHKERRQ(pierr);
1449: }
1450:
1451: DALocalToGlobalBegin(da,localF,F);
1452: DALocalToGlobalEnd(da,localF,F);
1453: DAVecRestoreArray(da,localX,&u);
1454: DAVecRestoreArray(da,localF,&fu);
1455: if (PetscExceptionValue(ierr)) {
1456: PetscErrorCode pierr = DARestoreLocalVector(da,&localX);CHKERRQ(pierr);
1457: pierr = DARestoreLocalVector(da,&localF);CHKERRQ(pierr);
1458: }
1459:
1460: DARestoreLocalVector(da,&localX);
1461: DARestoreLocalVector(da,&localF);
1462: return(0);
1463: }
1467: /*@
1468: DAFormFunction1 - Evaluates a user-provided function on each processor that
1469: shares a DA
1471: Input Parameters:
1472: + da - the DA that defines the grid
1473: . vu - input vector
1474: . vfu - output vector
1475: - w - any user data
1477: Notes: Does NOT do ghost updates on vu upon entry
1479: Level: advanced
1481: .seealso: DAComputeJacobian1WithAdic()
1483: @*/
1484: PetscErrorCode DAFormFunction1(DA da,Vec vu,Vec vfu,void *w)
1485: {
1487: void *u,*fu;
1488: DALocalInfo info;
1489:
1492: DAGetLocalInfo(da,&info);
1493: DAVecGetArray(da,vu,&u);
1494: DAVecGetArray(da,vfu,&fu);
1496: CHKMEMQ;
1497: (*da->lf)(&info,u,fu,w);
1498: if (PetscExceptionValue(ierr)) {
1499: PetscErrorCode pierr = DAVecRestoreArray(da,vu,&u);CHKERRQ(pierr);
1500: pierr = DAVecRestoreArray(da,vfu,&fu);CHKERRQ(pierr);
1501: }
1502:
1503: CHKMEMQ;
1505: DAVecRestoreArray(da,vu,&u);
1506: DAVecRestoreArray(da,vfu,&fu);
1507: return(0);
1508: }
1512: PetscErrorCode DAFormFunctioniTest1(DA da,void *w)
1513: {
1514: Vec vu,fu,fui;
1516: PetscInt i,n;
1517: PetscScalar *ui;
1518: PetscRandom rnd;
1519: PetscReal norm;
1522: DAGetLocalVector(da,&vu);
1523: PetscRandomCreate(PETSC_COMM_SELF,&rnd);
1524: PetscRandomSetFromOptions(rnd);
1525: VecSetRandom(vu,rnd);
1526: PetscRandomDestroy(rnd);
1528: DAGetGlobalVector(da,&fu);
1529: DAGetGlobalVector(da,&fui);
1530:
1531: DAFormFunction1(da,vu,fu,w);
1533: VecGetArray(fui,&ui);
1534: VecGetLocalSize(fui,&n);
1535: for (i=0; i<n; i++) {
1536: DAFormFunctioni1(da,i,vu,ui+i,w);
1537: }
1538: VecRestoreArray(fui,&ui);
1540: VecAXPY(fui,-1.0,fu);
1541: VecNorm(fui,NORM_2,&norm);
1542: PetscPrintf(da->comm,"Norm of difference in vectors %G\n",norm);
1543: VecView(fu,0);
1544: VecView(fui,0);
1546: DARestoreLocalVector(da,&vu);
1547: DARestoreGlobalVector(da,&fu);
1548: DARestoreGlobalVector(da,&fui);
1549: return(0);
1550: }
1554: /*@
1555: DAFormFunctioni1 - Evaluates a user-provided point-wise function
1557: Input Parameters:
1558: + da - the DA that defines the grid
1559: . i - the component of the function we wish to compute (must be local)
1560: . vu - input vector
1561: . vfu - output value
1562: - w - any user data
1564: Notes: Does NOT do ghost updates on vu upon entry
1566: Level: advanced
1568: .seealso: DAComputeJacobian1WithAdic()
1570: @*/
1571: PetscErrorCode DAFormFunctioni1(DA da,PetscInt i,Vec vu,PetscScalar *vfu,void *w)
1572: {
1574: void *u;
1575: DALocalInfo info;
1576: MatStencil stencil;
1577:
1580: DAGetLocalInfo(da,&info);
1581: DAVecGetArray(da,vu,&u);
1583: /* figure out stencil value from i */
1584: stencil.c = i % info.dof;
1585: stencil.i = (i % (info.xm*info.dof))/info.dof;
1586: stencil.j = (i % (info.xm*info.ym*info.dof))/(info.xm*info.dof);
1587: stencil.k = i/(info.xm*info.ym*info.dof);
1589: (*da->lfi)(&info,&stencil,u,vfu,w);
1591: DAVecRestoreArray(da,vu,&u);
1592: return(0);
1593: }
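/*
   A worked instance of the decoding above (illustration only): for dof=2,
   xm=10, ym=5 and i=57,
       stencil.c = 57 % 2          = 1
       stencil.i = (57 % 20) / 2   = 8
       stencil.j = (57 % 100) / 20 = 2
       stencil.k = 57 / 100        = 0
   i.e. component 1 of local grid point (8,2,0).
*/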
1597: /*@
1598: DAFormFunctionib1 - Evaluates a user-provided point-block function
1600: Input Parameters:
1601: + da - the DA that defines the grid
1602: . i - the component of the function we wish to compute (must be local)
1603: . vu - input vector
1604: . vfu - output value
1605: - w - any user data
1607: Notes: Does NOT do ghost updates on vu upon entry
1609: Level: advanced
1611: .seealso: DAComputeJacobian1WithAdic()
1613: @*/
1614: PetscErrorCode DAFormFunctionib1(DA da,PetscInt i,Vec vu,PetscScalar *vfu,void *w)
1615: {
1617: void *u;
1618: DALocalInfo info;
1619: MatStencil stencil;
1620:
1622: DAGetLocalInfo(da,&info);
1623: DAVecGetArray(da,vu,&u);
1625: /* figure out stencil value from i */
1626: stencil.c = i % info.dof;
1627: if (stencil.c) SETERRQ(PETSC_ERR_ARG_WRONG,"Point-block functions can only be called for the entire block");
1628: stencil.i = (i % (info.xm*info.dof))/info.dof;
1629: stencil.j = (i % (info.xm*info.ym*info.dof))/(info.xm*info.dof);
1630: stencil.k = i/(info.xm*info.ym*info.dof);
1632: (*da->lfib)(&info,&stencil,u,vfu,w);
1634: DAVecRestoreArray(da,vu,&u);
1635: return(0);
1636: }
1638: #if defined(new)
1641: /*
1642: DAGetDiagonal_MFFD - Gets the diagonal for a matrix-free matrix where the local
1643: function lives on a DA
1645: y ~= (F(u + ha) - F(u))/h,
1646: where F = nonlinear function, as set by SNESSetFunction()
1647: u = current iterate
1648: h = difference interval
1649: */
1650: PetscErrorCode DAGetDiagonal_MFFD(DA da,Vec U,Vec a)
1651: {
1652: PetscScalar h,*aa,*ww,v;
1653: PetscReal epsilon = PETSC_SQRT_MACHINE_EPSILON,umin = 100.0*PETSC_SQRT_MACHINE_EPSILON;
1655: PetscInt gI,nI;
1656: MatStencil stencil;
1657: DALocalInfo info;
1658:
1660: (*ctx->func)(0,U,a,ctx->funcctx);
1661: (*ctx->funcisetbase)(U,ctx->funcctx);
1663: VecGetArray(U,&ww);
1664: VecGetArray(a,&aa);
1665:
1666: nI = 0;
1667: h = ww[gI];
1668: if (h == 0.0) h = 1.0;
1669: #if !defined(PETSC_USE_COMPLEX)
1670: if (h < umin && h >= 0.0) h = umin;
1671: else if (h < 0.0 && h > -umin) h = -umin;
1672: #else
1673: if (PetscAbsScalar(h) < umin && PetscRealPart(h) >= 0.0) h = umin;
1674: else if (PetscRealPart(h) < 0.0 && PetscAbsScalar(h) < umin) h = -umin;
1675: #endif
1676: h *= epsilon;
1677:
678: ww[gI] += h;
1679: (*ctx->funci)(i,w,&v,ctx->funcctx);
1680: aa[nI] = (v - aa[nI])/h;
1681: ww[gI] -= h;
1682: nI++;
1683: }
1684: VecRestoreArray(U,&ww);
1685: VecRestoreArray(a,&aa);
1686: return(0);
1687: }
1688: #endif
1690: #if defined(PETSC_HAVE_ADIC)
1692: #include "adic/ad_utils.h"
1697: /*@C
1698: DAComputeJacobian1WithAdic - Evaluates an ADIC-provided Jacobian function on each processor that
1699: shares a DA
1701: Input Parameters:
1702: + da - the DA that defines the grid
1703: . vu - input vector (ghosted)
1704: . J - output matrix
1705: - w - any user data
1707: Level: advanced
1709: Notes: Does NOT do ghost updates on vu upon entry
1711: .seealso: DAFormFunction1()
1713: @*/
1714: PetscErrorCode DAComputeJacobian1WithAdic(DA da,Vec vu,Mat J,void *w)
1715: {
1717: PetscInt gtdof,tdof;
1718: PetscScalar *ustart;
1719: DALocalInfo info;
1720: void *ad_u,*ad_f,*ad_ustart,*ad_fstart;
1721: ISColoring iscoloring;
1724: DAGetLocalInfo(da,&info);
1726: PetscADResetIndep();
1728: /* get space for derivative objects. */
1729: DAGetAdicArray(da,PETSC_TRUE,(void **)&ad_u,&ad_ustart,&gtdof);
1730: DAGetAdicArray(da,PETSC_FALSE,(void **)&ad_f,&ad_fstart,&tdof);
1731: VecGetArray(vu,&ustart);
1732: DAGetColoring(da,IS_COLORING_GHOSTED,&iscoloring);
1734: PetscADSetValueAndColor(ad_ustart,gtdof,iscoloring->colors,ustart);
1736: VecRestoreArray(vu,&ustart);
1737: PetscADIncrementTotalGradSize(iscoloring->n);
1738: ISColoringDestroy(iscoloring);
1739: PetscADSetIndepDone();
1742: (*da->adic_lf)(&info,ad_u,ad_f,w);
1745: /* stick the values into the matrix */
1746: MatSetValuesAdic(J,(PetscScalar**)ad_fstart);
1748: /* return space for derivative objects. */
1749: DARestoreAdicArray(da,PETSC_TRUE,(void **)&ad_u,&ad_ustart,&gtdof);
1750: DARestoreAdicArray(da,PETSC_FALSE,(void **)&ad_f,&ad_fstart,&tdof);
1751: return(0);
1752: }
1756: /*@C
1757: DAMultiplyByJacobian1WithAdic - Applies an ADIC-provided Jacobian function to a vector on
1758: each processor that shares a DA.
1760: Input Parameters:
1761: + da - the DA that defines the grid
1762: . vu - Jacobian is computed at this point (ghosted)
1763: . v - product is done on this vector (ghosted)
1764: . fu - output vector = J(vu)*v (not ghosted)
1765: - w - any user data
1767: Notes:
1768: This routine does NOT do ghost updates on vu upon entry.
1770: Level: advanced
1772: .seealso: DAFormFunction1()
1774: @*/
1775: PetscErrorCode DAMultiplyByJacobian1WithAdic(DA da,Vec vu,Vec v,Vec f,void *w)
1776: {
1778: PetscInt i,gtdof,tdof;
1779: PetscScalar *avu,*av,*af,*ad_vustart,*ad_fstart;
1780: DALocalInfo info;
1781: void *ad_vu,*ad_f;
1784: DAGetLocalInfo(da,&info);
1786: /* get space for derivative objects. */
1787: DAGetAdicMFArray(da,PETSC_TRUE,(void **)&ad_vu,(void**)&ad_vustart,&gtdof);
1788: DAGetAdicMFArray(da,PETSC_FALSE,(void **)&ad_f,(void**)&ad_fstart,&tdof);
1790: /* copy input vector into derivative object */
1791: VecGetArray(vu,&avu);
1792: VecGetArray(v,&av);
1793: for (i=0; i<gtdof; i++) {
1794: ad_vustart[2*i] = avu[i];
1795: ad_vustart[2*i+1] = av[i];
1796: }
1797: VecRestoreArray(vu,&avu);
1798: VecRestoreArray(v,&av);
1800: PetscADResetIndep();
1801: PetscADIncrementTotalGradSize(1);
1802: PetscADSetIndepDone();
1804: (*da->adicmf_lf)(&info,ad_vu,ad_f,w);
1806: /* stick the values into the vector */
1807: VecGetArray(f,&af);
1808: for (i=0; i<tdof; i++) {
1809: af[i] = ad_fstart[2*i+1];
1810: }
1811: VecRestoreArray(f,&af);
1813: /* return space for derivative objects. */
1814: DARestoreAdicMFArray(da,PETSC_TRUE,(void **)&ad_vu,(void**)&ad_vustart,&gtdof);
1815: DARestoreAdicMFArray(da,PETSC_FALSE,(void **)&ad_f,(void**)&ad_fstart,&tdof);
1816: return(0);
1817: }
1818: #endif
1822: /*@
1823: DAComputeJacobian1 - Evaluates a local Jacobian function on each processor that
1824: shares a DA
1826: Input Parameters:
1827: + da - the DA that defines the grid
1828: . vu - input vector (ghosted)
1829: . J - output matrix
1830: - w - any user data
1832: Notes: Does NOT do ghost updates on vu upon entry
1834: Level: advanced
1836: .seealso: DAFormFunction1()
1838: @*/
1839: PetscErrorCode DAComputeJacobian1(DA da,Vec vu,Mat J,void *w)
1840: {
1842: void *u;
1843: DALocalInfo info;
1846: DAGetLocalInfo(da,&info);
1847: DAVecGetArray(da,vu,&u);
1848: (*da->lj)(&info,u,J,w);
1849: DAVecRestoreArray(da,vu,&u);
1850: return(0);
1851: }
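/*
   Usage sketch (an illustration, not part of da2.c): the cached local
   Jacobian called above has the shape of a DALocalFunction1 whose third
   argument is the matrix. "MyLocalJacobian" is hypothetical; the elided
   part would fill row/col/v for each local grid point.

       PetscErrorCode MyLocalJacobian(DALocalInfo *info,PetscScalar **x,
                                      Mat J,void *ctx)
       {
         MatStencil  row,col[5];
         PetscScalar v[5];
         ...
         MatSetValuesStencil(J,1,&row,5,col,v,INSERT_VALUES);
         MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);
         MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);
         return(0);
       }

       DASetLocalJacobian(da,(DALocalFunction1)MyLocalJacobian);
       DAComputeJacobian1(da,X,J,ctx);
*/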
1856: /*
1857: DAComputeJacobian1WithAdifor - Evaluates an ADIFOR-provided local Jacobian function on each processor that
1858: shares a DA
1860: Input Parameters:
1861: + da - the DA that defines the grid
1862: . vu - input vector (ghosted)
1863: . J - output matrix
1864: - w - any user data
1866: Notes: Does NOT do ghost updates on vu upon entry
1868: .seealso: DAFormFunction1()
1870: */
1871: PetscErrorCode DAComputeJacobian1WithAdifor(DA da,Vec vu,Mat J,void *w)
1872: {
1873: PetscErrorCode ierr;
1874: PetscInt i,Nc,N;
1875: ISColoringValue *color;
1876: DALocalInfo info;
1877: PetscScalar *u,*g_u,*g_f,*f,*p_u;
1878: ISColoring iscoloring;
1879: void (*lf)(PetscInt*,DALocalInfo*,PetscScalar*,PetscScalar*,PetscInt*,PetscScalar*,PetscScalar*,PetscInt*,void*,PetscErrorCode*) =
1880: (void (*)(PetscInt*,DALocalInfo*,PetscScalar*,PetscScalar*,PetscInt*,PetscScalar*,PetscScalar*,PetscInt*,void*,PetscErrorCode*))*da->adifor_lf;
1883: DAGetColoring(da,IS_COLORING_GHOSTED,&iscoloring);
1884: Nc = iscoloring->n;
1885: DAGetLocalInfo(da,&info);
1886: N = info.gxm*info.gym*info.gzm*info.dof;
1888: /* get space for derivative objects. */
1889: PetscMalloc(Nc*info.gxm*info.gym*info.gzm*info.dof*sizeof(PetscScalar),&g_u);
1890: PetscMemzero(g_u,Nc*info.gxm*info.gym*info.gzm*info.dof*sizeof(PetscScalar));
1891: p_u = g_u;
1892: color = iscoloring->colors;
1893: for (i=0; i<N; i++) {
1894: p_u[*color++] = 1.0;
1895: p_u += Nc;
1896: }
1897: ISColoringDestroy(iscoloring);
1898: PetscMalloc(Nc*info.xm*info.ym*info.zm*info.dof*sizeof(PetscScalar),&g_f);
1899: PetscMalloc(info.xm*info.ym*info.zm*info.dof*sizeof(PetscScalar),&f);
1901: /* Seed the input array g_u with coloring information */
1902:
1903: VecGetArray(vu,&u);
1904: (lf)(&Nc,&info,u,g_u,&Nc,f,g_f,&Nc,w,&ierr);
1905: VecRestoreArray(vu,&u);
1907: /* stick the values into the matrix */
1908: /* PetscScalarView(Nc*info.xm*info.ym,g_f,0); */
1909: MatSetValuesAdifor(J,Nc,g_f);
1911: /* return space for derivative objects. */
1912: PetscFree(g_u);
1913: PetscFree(g_f);
1914: PetscFree(f);
1915: return(0);
1916: }
1920: /*@C
1921: DAFormJacobianLocal - This is a universal Jacobian evaluation routine for
1922: a local DA function.
1924: Collective on DA
1926: Input Parameters:
1927: + da - the DA context
1928: . func - The local function
1929: . X - input vector
1930: . J - Jacobian matrix
1931: - ctx - A user context
1933: Level: intermediate
1935: .seealso: DASetLocalFunction(), DASetLocalJacobian(), DASetLocalAdicFunction(), DASetLocalAdicMFFunction(),
1936: SNESSetFunction(), SNESSetJacobian()
1938: @*/
1939: PetscErrorCode DAFormJacobianLocal(DA da, DALocalFunction1 func, Vec X, Mat J, void *ctx)
1940: {
1941: Vec localX;
1942: DALocalInfo info;
1943: void *u;
1947: DAGetLocalVector(da,&localX);
1948: /*
1949: Scatter ghost points to local vector, using the 2-step process
1950: DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
1951: */
1952: DAGlobalToLocalBegin(da,X,INSERT_VALUES,localX);
1953: DAGlobalToLocalEnd(da,X,INSERT_VALUES,localX);
1954: DAGetLocalInfo(da,&info);
1955: DAVecGetArray(da,localX,&u);
1956: (*func)(&info,u,J,ctx);
1957: if (PetscExceptionValue(ierr)) {
1958: PetscErrorCode pierr = DAVecRestoreArray(da,localX,&u);CHKERRQ(pierr);
1959: }
1960:
1961: DAVecRestoreArray(da,localX,&u);
1962: if (PetscExceptionValue(ierr)) {
1963: PetscErrorCode pierr = DARestoreLocalVector(da,&localX);CHKERRQ(pierr);
1964: }
1965:
1966: DARestoreLocalVector(da,&localX);
1967: return(0);
1968: }
1972: /*@C
1973: DAMultiplyByJacobian1WithAD - Applies a Jacobian function supplied by ADIFOR or ADIC
1974: to a vector on each processor that shares a DA.
1976: Input Parameters:
1977: + da - the DA that defines the grid
1978: . vu - Jacobian is computed at this point (ghosted)
1979: . v - product is done on this vector (ghosted)
1980: . fu - output vector = J(vu)*v (not ghosted)
1981: - w - any user data
1983: Notes:
1984: This routine does NOT do ghost updates on vu and v upon entry.
1985:
1986: Automatically calls DAMultiplyByJacobian1WithAdifor() or DAMultiplyByJacobian1WithAdic()
1987: depending on whether DASetLocalAdicMFFunction() or DASetLocalAdiforMFFunction() was called.
1989: Level: advanced
1991: .seealso: DAFormFunction1(), DAMultiplyByJacobian1WithAdifor(), DAMultiplyByJacobian1WithAdic()
1993: @*/
1994: PetscErrorCode DAMultiplyByJacobian1WithAD(DA da,Vec u,Vec v,Vec f,void *w)
1995: {
1999: if (da->adicmf_lf) {
2000: #if defined(PETSC_HAVE_ADIC)
2001: DAMultiplyByJacobian1WithAdic(da,u,v,f,w);
2002: #else
2003: SETERRQ(PETSC_ERR_SUP_SYS,"Requires ADIC to be installed and cannot use complex numbers");
2004: #endif
2005: } else if (da->adiformf_lf) {
2006: DAMultiplyByJacobian1WithAdifor(da,u,v,f,w);
2007: } else {
2008: SETERRQ(PETSC_ERR_ORDER,"Must call DASetLocalAdiforMFFunction() or DASetLocalAdicMFFunction() before using");
2009: }
2010: return(0);
2011: }
2016: /*@C
2017: DAMultiplyByJacobian1WithAdifor - Applies an ADIFOR-provided Jacobian function, on each processor that
2018: shares a DA, to a vector
2020: Input Parameters:
2021: + da - the DA that defines the grid
2022: . vu - Jacobian is computed at this point (ghosted)
2023: . v - product is done on this vector (ghosted)
2024: . fu - output vector = J(vu)*v (not ghosted)
2025: - w - any user data
2027: Notes: Does NOT do ghost updates on vu and v upon entry
2029: Level: advanced
2031: .seealso: DAFormFunction1()
2033: @*/
2034: PetscErrorCode DAMultiplyByJacobian1WithAdifor(DA da,Vec u,Vec v,Vec f,void *w)
2035: {
2037: PetscScalar *au,*av,*af,*awork;
2038: Vec work;
2039: DALocalInfo info;
2040: void (*lf)(DALocalInfo*,PetscScalar*,PetscScalar*,PetscScalar*,PetscScalar*,void*,PetscErrorCode*) =
2041: (void (*)(DALocalInfo*,PetscScalar*,PetscScalar*,PetscScalar*,PetscScalar*,void*,PetscErrorCode*))*da->adiformf_lf;
2044: DAGetLocalInfo(da,&info);
2046: DAGetGlobalVector(da,&work);
2047: VecGetArray(u,&au);
2048: VecGetArray(v,&av);
2049: VecGetArray(f,&af);
2050: VecGetArray(work,&awork);
2051: (lf)(&info,au,av,awork,af,w,&ierr);
2052: VecRestoreArray(u,&au);
2053: VecRestoreArray(v,&av);
2054: VecRestoreArray(f,&af);
2055: VecRestoreArray(work,&awork);
2056: DARestoreGlobalVector(da,&work);
2058: return(0);
2059: }
2063: /*@C
2064: DASetInterpolationType - Sets the type of interpolation that will be
2065: returned by DAGetInterpolation()
2067: Collective on DA
2069: Input Parameters:
2070: + da - initial distributed array
2071: - ctype - DA_Q1 and DA_Q0 are currently the only supported forms
2073: Level: intermediate
2075: Notes: you should call this on the coarser of the two DAs you pass to DAGetInterpolation()
2077: .keywords: distributed array, interpolation
2079: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DA, DAInterpolationType
2080: @*/
2081: PetscErrorCode DASetInterpolationType(DA da,DAInterpolationType ctype)
2082: {
2085: da->interptype = ctype;
2086: return(0);
2087: }
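/*
   Usage sketch (an illustration, not part of da2.c): select piecewise
   constant (Q0) interpolation on the coarser DA before requesting the
   interpolation operator between a coarse DA "dac" and a fine DA "daf".

       Mat A;
       DASetInterpolationType(dac,DA_Q0);
       DAGetInterpolation(dac,daf,&A,PETSC_NULL);
*/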