Actual source code: pbvec.c

#define PETSCVEC_DLL
/*
   This file contains routines for parallel vector operations.
 */
#include "src/vec/vec/impls/mpi/pvecimpl.h"

/*
       Note this code is very similar to VecPublish_Seq()
*/
static PetscErrorCode VecPublish_MPI(PetscObject obj)
{
  return(0);
}

PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;

  /* local (conjugated) dot product, then sum the contributions from all processes */
  VecDot_Seq(xin,yin,&work);
  MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);
  *z = sum;
  return(0);
}

PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;

  /* local (unconjugated) dot product, then sum the contributions from all processes */
  VecTDot_Seq(xin,yin,&work);
  MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);
  *z = sum;
  return(0);
}

PetscErrorCode VecSetOption_MPI(Vec v,VecOption op)
{
  if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
    v->stash.donotstash = PETSC_TRUE;
  } else if (op == VEC_TREAT_OFF_PROC_ENTRIES) {
    v->stash.donotstash = PETSC_FALSE;
  } else if (op == VEC_IGNORE_NEGATIVE_INDICES) {
    v->stash.ignorenegidx = PETSC_TRUE;
  } else if (op == VEC_TREAT_NEGATIVE_INDICES) {
    v->stash.ignorenegidx = PETSC_FALSE;
  }
  return(0);
}

EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);

PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  Vec_MPI        *v = (Vec_MPI *)vin->data;

  if (v->unplacedarray) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array = (PetscScalar *)a;
  if (v->localrep) {
    VecPlaceArray(v->localrep,a);  /* keep the local (ghosted) representation in sync */
  }
  return(0);
}

EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, VecType, Vec*);
EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);

static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq, /* 10 */
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI, /* 20 */
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            VecGetArray_Seq,
            VecGetSize_MPI,
            VecGetSize_Seq,
            VecRestoreArray_Seq,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI, /* 30 */
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,
            VecTDot_Seq,
            VecNorm_Seq,
            VecMDot_Seq,
            VecMTDot_Seq, /* 40 */
            VecLoadIntoVector_Default,
            VecReciprocal_Default,
            0, /* VecViewNative... */
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_Seq,
            0,
            VecMaxPointwiseDivide_Seq,
            VecLoad_Binary, /* 50 */
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI};

/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;

  v->bops->publish   = VecPublish_MPI;
  PetscLogObjectMemory(v,sizeof(Vec_MPI) + (v->map.n+nghost+1)*sizeof(PetscScalar));
  PetscNew(Vec_MPI,&s);
  PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));
  v->data        = (void*)s;
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  if (v->map.bs == -1) v->map.bs = 1;
  PetscMapSetUp(&v->map);
  if (array) {
    s->array           = (PetscScalar *)array;
    s->array_allocated = 0;
  } else {
    PetscInt n         = v->map.n+nghost;
    PetscMalloc(n*sizeof(PetscScalar),&s->array);
    s->array_allocated = s->array;
    PetscMemzero(s->array,v->map.n*sizeof(PetscScalar));
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode  = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  VecStashCreate_Private(v->comm,1,&v->stash);
  VecStashCreate_Private(v->comm,v->map.bs,&v->bstash);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);
  PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);
#endif
  PetscObjectChangeTypeName((PetscObject)v,VECMPI);
  PetscPublishAll(v);
  return(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VecType, VecCreateMPI()
M*/
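/*
   A minimal usage sketch (a sketch only; error checking elided): selecting
   the VECMPI type at runtime through the options database.

       Vec v;
       VecCreate(PETSC_COMM_WORLD,&v);
       VecSetSizes(v,PETSC_DECIDE,100);
       VecSetFromOptions(v);                  run with -vec_type mpi
       VecSetValue(v,0,1.0,INSERT_VALUES);
       VecAssemblyBegin(v);
       VecAssemblyEnd(v);
       VecDestroy(v);
*/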

PetscErrorCode  VecCreate_MPI(Vec vv)
{
  VecCreate_MPI_Private(vv,0,0);
  return(0);
}

/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  if (n == PETSC_DECIDE) {
    SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  }
  PetscSplitOwnership(comm,&n,&N);
  VecCreate(comm,vv);
  VecSetSizes(*vv,n,N);
  VecCreate_MPI_Private(*vv,0,array);
  return(0);
}
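/*
   A minimal usage sketch (a sketch only; error checking elided), assuming each
   process owns four entries and PETSc computes the global size:

       PetscScalar a[4];
       Vec         v;
       VecCreateMPIWithArray(PETSC_COMM_WORLD,4,PETSC_DECIDE,a,&v);
       ...
       VecDestroy(v);                  a[] is not freed here; it belongs to the caller
*/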

/*@
    VecGhostGetLocalForm - Obtains the local ghosted representation of
    a parallel vector created with VecCreateGhost().

    Not Collective

    Input Parameter:
.   g - the global vector. The vector must have been obtained with either
        VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().

    Output Parameter:
.   l - the local (ghosted) representation

    Notes:
    This routine does not actually update the ghost values, but rather it
    returns a sequential vector that includes the locations for the ghost
    values and their current values. The returned vector and the original
    vector passed in share the same array that contains the actual vector data.

    One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
    finished using the object.

    Level: advanced

   Concepts: vectors^ghost point access

.seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
PetscErrorCode  VecGhostGetLocalForm(Vec g,Vec *l)
{
  PetscTruth     isseq,ismpi;

  PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);
  PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);
  if (ismpi) {
    Vec_MPI *v  = (Vec_MPI*)g->data;
    if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
    *l = v->localrep;
  } else if (isseq) {
    *l = g;
  } else {
    SETERRQ1(PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",g->type_name);
  }
  PetscObjectReference((PetscObject)*l);
  return(0);
}
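/*
   A typical access pattern (a sketch only; error checking elided): refresh the
   ghost entries from their owners, then read them through the local form.

       Vec         lx;
       PetscScalar *a;
       VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostGetLocalForm(x,&lx);
       VecGetArray(lx,&a);                 a[0 .. n+nghost-1] is now valid
       VecRestoreArray(lx,&a);
       VecGhostRestoreLocalForm(x,&lx);
*/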

/*@
    VecGhostRestoreLocalForm - Restores the local ghosted representation of
    a parallel vector obtained with VecGhostGetLocalForm().

    Not Collective

    Input Parameters:
+   g - the global vector
-   l - the local (ghosted) representation

    Notes:
    This routine does not actually update the ghost values, but rather it
    returns a sequential vector that includes the locations for the ghost values
    and their current values.

    Level: advanced

.seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
@*/
PetscErrorCode  VecGhostRestoreLocalForm(Vec g,Vec *l)
{
  PetscObjectDereference((PetscObject)*l);
  return(0);
}

/*@
   VecGhostUpdateBegin - Begins the vector scatter that updates the vector,
   either from the local representation to the global or from the global
   representation to the local.

   Collective on Vec

   Input Parameters:
+  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
PetscErrorCode  VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI        *v;

  v  = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) return(0);

  if (scattermode == SCATTER_REVERSE) {
    /* accumulate values from the local (ghosted) representation into the global vector */
    VecScatterBegin(v->localupdate,v->localrep,g,insertmode,scattermode);
  } else {
    /* move owned values from the global vector into the ghost slots of the local representation */
    VecScatterBegin(v->localupdate,g,v->localrep,insertmode,scattermode);
  }
  return(0);
}

/*@
   VecGhostUpdateEnd - Ends the vector scatter that updates the vector,
   either from the local representation to the global or from the global
   representation to the local.

   Collective on Vec

   Input Parameters:
+  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
PetscErrorCode  VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI        *v;

  v  = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) return(0);

  if (scattermode == SCATTER_REVERSE) {
    VecScatterEnd(v->localupdate,v->localrep,g,insertmode,scattermode);
  } else {
    VecScatterEnd(v->localupdate,g,v->localrep,insertmode,scattermode);
  }
  return(0);
}

/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
-  array - the space to store the vector values (of length at least n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  PetscSplitOwnership(comm,&n,&N);
  /* Create global representation */
  VecCreate(comm,vv);
  VecSetSizes(*vv,n,N);
  VecCreate_MPI_Private(*vv,nghost,array);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  VecGetArray(*vv,&larray);
  VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);
  PetscLogObjectParent(*vv,w->localrep);
  VecRestoreArray(*vv,&larray);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ISCreateGeneral(comm,nghost,ghosts,&from);
  ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);
  VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
  PetscLogObjectParent(*vv,w->localupdate);
  ISDestroy(to);
  ISDestroy(from);

  /* set local to global mapping for ghosted vector */
  PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);
  VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ISLocalToGlobalMappingCreate(comm,n+nghost,indices,&ltog);
  PetscFree(indices);  /* the mapping keeps its own copy of the indices */
  VecSetLocalToGlobalMapping(*vv,ltog);
  ISLocalToGlobalMappingDestroy(ltog);
  return(0);
}
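/*
   A usage sketch (a sketch only; error checking elided): four locally owned
   entries plus two ghost entries, with storage supplied by the caller. The
   array must hold at least n + nghost scalars.

       PetscInt    ghosts[2];
       PetscScalar a[6];                   4 owned entries + 2 ghosts
       Vec         x;
       ...fill ghosts[] with global indices owned by other processes...
       VecCreateGhostWithArray(PETSC_COMM_WORLD,4,PETSC_DECIDE,2,ghosts,a,&x);
*/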

/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);
  return(0);
}
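/*
   A concrete sketch for two processes (a sketch only; error checking elided):
   each process owns four entries and ghosts the first entry owned by the
   other process.

       PetscInt    ghosts[1];
       PetscMPIInt rank;
       Vec         x;
       MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
       ghosts[0] = rank ? 0 : 4;           first entry of the other process
       VecCreateGhost(PETSC_COMM_WORLD,4,PETSC_DECIDE,1,ghosts,&x);
*/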

PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  VecCreate(win->comm,v);
  VecSetSizes(*v,win->map.n,win->map.N);
  VecCreate_MPI_Private(*v,w->nghost,0);
  vw   = (Vec_MPI *)(*v)->data;
  PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    VecGetArray(*v,&array);
    VecCreateSeqWithArray(PETSC_COMM_SELF,win->map.n+w->nghost,array,&vw->localrep);
    PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));
    VecRestoreArray(*v,&array);
    PetscLogObjectParent(*v,vw->localrep);
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      PetscObjectReference((PetscObject)vw->localupdate);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  PetscOListDuplicate(win->olist,&(*v)->olist);
  PetscFListDuplicate(win->qlist,&(*v)->qlist);
  if (win->mapping) {
    PetscObjectReference((PetscObject)win->mapping);
    (*v)->mapping = win->mapping;
  }
  if (win->bmapping) {
    PetscObjectReference((PetscObject)win->bmapping);
    (*v)->bmapping = win->bmapping;
  }
  (*v)->map.bs    = win->map.bs;
  (*v)->bstash.bs = win->bstash.bs;

  return(0);
}

/* ------------------------------------------------------------------------------------------*/
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length (must be a multiple of bs)
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
-  array - the space to store the vector values (of length at least n + bs*nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while
   nghost is the number of blocks in the ghost portion, i.e. the number of elements
   in the ghost portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  Vec_MPI        *w;
  PetscScalar    *larray;
  IS             from,to;

  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  PetscSplitOwnership(comm,&n,&N);
  /* Create global representation */
  VecCreate(comm,vv);
  VecSetSizes(*vv,n,N);
  VecCreate_MPI_Private(*vv,nghost*bs,array);
  VecSetBlockSize(*vv,bs);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  VecGetArray(*vv,&larray);
  VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);
  VecSetBlockSize(w->localrep,bs);
  PetscLogObjectParent(*vv,w->localrep);
  VecRestoreArray(*vv,&larray);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ISCreateBlock(comm,bs,nghost,ghosts,&from);
  ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);
  VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
  PetscLogObjectParent(*vv,w->localupdate);
  ISDestroy(to);
  ISDestroy(from);

  return(0);
}
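/*
   A sketch (a sketch only; error checking elided): block size bs = 2, n = 6
   local entries (3 blocks), and one ghost block, so the caller-owned array
   must hold n + bs*nghost = 8 scalars.

       PetscScalar a[8];
       PetscInt    gblocks[1];
       Vec         x;
       ...set gblocks[0] to the index of a block owned by another process...
       VecCreateGhostBlockWithArray(PETSC_COMM_WORLD,2,6,PETSC_DECIDE,1,gblocks,a,&x);
*/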

/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length (must be a multiple of bs)
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while
   nghost is the number of blocks in the ghost portion, i.e. the number of elements
   in the ghost portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);
  return(0);
}
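/*
   A concrete sketch for two processes (a sketch only; error checking elided):
   block size 2, two locally owned blocks (n = 4 entries), and one ghost block
   taken from the other process.

       PetscInt    gblocks[1];
       PetscMPIInt rank;
       Vec         x;
       MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
       gblocks[0] = rank ? 0 : 2;          a block index, not an entry index
       VecCreateGhostBlock(PETSC_COMM_WORLD,2,4,PETSC_DECIDE,1,gblocks,&x);
*/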

/*
    These introduce a ghosted vector where the ghosting is determined by the call to
  VecSetLocalToGlobalMapping()
*/

PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
{
  Vec_MPI        *v = (Vec_MPI *)vv->data;

  v->nghost = map->n - vv->map.n;

  /* we need to enlarge the array space that was allocated when the vector was created */
  PetscFree(v->array_allocated);
  PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);
  v->array = v->array_allocated;

  /* Create local representation */
  VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);
  PetscLogObjectParent(vv,v->localrep);
  return(0);
}


PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
{
  Vec_MPI        *v = (Vec_MPI *)vv->data;

  VecSetValues(v->localrep,n,ix,values,mode);
  return(0);
}

PetscErrorCode  VecCreate_FETI(Vec vv)
{
  VecSetType(vv,VECMPI);

  /* overwrite the functions to handle setting values locally */
  vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
  vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
  vv->ops->assemblybegin           = 0;
  vv->ops->assemblyend             = 0;
  vv->ops->setvaluesblocked        = 0;

  return(0);
}
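/*
   A sketch of the intended call sequence (a sketch only; the registered type
   name for VecCreate_FETI() is not shown in this file, so it is called
   directly here):

       Vec v;
       VecCreate(PETSC_COMM_WORLD,&v);
       VecSetSizes(v,n,PETSC_DECIDE);
       VecCreate_FETI(v);
       VecSetLocalToGlobalMapping(v,map);          defines the ghosting
       VecSetValuesLocal(v,m,idx,vals,ADD_VALUES);
*/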