Actual source code: pbvec.c

#define PETSCVEC_DLL
/*
   This file contains routines for parallel vector operations.
 */
#include "src/vec/vec/impls/mpi/pvecimpl.h"

/*
       Note this code is very similar to VecPublish_Seq()
*/
static PetscErrorCode VecPublish_MPI(PetscObject obj)
{
  return(0);
}

PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;

  VecDot_Seq(xin,yin,&work);
  MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);
  *z = sum;
  return(0);
}
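
/*
   Illustrative sketch (not part of the library): the pattern VecDot_MPI()
   uses above, written directly against MPI. Each process reduces its own
   entries, then a single MPI_Allreduce() combines the partial results.
   The names localdot, n, x, y and comm are hypothetical:

     PetscScalar localdot = 0.0,globaldot;
     PetscInt    i;
     for (i=0; i<n; i++) localdot += x[i]*y[i];    (real case; the complex case conjugates one factor)
     MPI_Allreduce(&localdot,&globaldot,1,MPIU_SCALAR,PetscSum_Op,comm);
*/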

PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;

  VecTDot_Seq(xin,yin,&work);
  MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);
  *z = sum;
  return(0);
}

PetscErrorCode VecSetOption_MPI(Vec v,VecOption op)
{
  if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
    v->stash.donotstash = PETSC_TRUE;
  } else if (op == VEC_TREAT_OFF_PROC_ENTRIES) {
    v->stash.donotstash = PETSC_FALSE;
  }
  return(0);
}
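
/*
   Caller-side sketch: an application that only ever sets locally owned
   entries can switch off the stashing handled above through the public
   API (v is any MPI vector; error checking omitted):

     VecSetOption(v,VEC_IGNORE_OFF_PROC_ENTRIES);
*/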

EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);

PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  Vec_MPI        *v = (Vec_MPI *)vin->data;

  if (v->unplacedarray) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array = (PetscScalar *)a;
  if (v->localrep) {
    VecPlaceArray(v->localrep,a);
  }
  return(0);
}
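
/*
   Caller-side sketch of the place/reset pairing that the check above
   enforces (myarray is a hypothetical user-owned buffer of the correct
   local length; error checking omitted):

     VecPlaceArray(v,myarray);     the vector now computes on myarray
     ...
     VecResetArray(v);             the original array is restored
*/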

EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, VecType, Vec*);
EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);

static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq, /* 10 */
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI, /* 20 */
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            VecGetArray_Seq,
            VecGetSize_MPI,
            VecGetSize_Seq,
            VecRestoreArray_Seq,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI, /* 30 */
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,
            VecTDot_Seq,
            VecNorm_Seq,
            VecMDot_Seq,
            VecMTDot_Seq, /* 40 */
            VecLoadIntoVector_Default,
            VecReciprocal_Default,
            0, /* VecViewNative... */
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_Seq,
            0,
            VecMaxPointwiseDivide_Seq,
            VecLoad_Binary, /* 50 */
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI};
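
/*
   Dispatch sketch (illustrative): a public call such as VecDot(x,y,&z)
   reduces, after argument checking, to an indirect call through the
   table above, roughly

     (*x->ops->dot)(x,y,&z);

   so VECMPI behaviour is obtained simply by copying DvOps into the
   vector's ops table; note how MPI-specific entries are mixed with
   purely local Seq entries for operations that need no communication.
*/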

/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;

  v->bops->publish   = VecPublish_MPI;
  PetscLogObjectMemory(v,sizeof(Vec_MPI) + (v->map.n+nghost+1)*sizeof(PetscScalar));
  PetscNew(Vec_MPI,&s);
  PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));
  v->data        = (void*)s;
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  if (v->map.bs == -1) v->map.bs = 1;
  PetscMapInitialize(v->comm,&v->map);
  if (array) {
    s->array           = (PetscScalar *)array;
    s->array_allocated = 0;
  } else {
    PetscInt n         = v->map.n+nghost;
    PetscMalloc(n*sizeof(PetscScalar),&s->array);
    s->array_allocated = s->array;
    PetscMemzero(s->array,v->map.n*sizeof(PetscScalar));
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode  = NOT_SET_VALUES;
  /* Create the stashes. The block size for bstash is set later when
     VecSetValuesBlocked() is called.
  */
  VecStashCreate_Private(v->comm,1,&v->stash);
  VecStashCreate_Private(v->comm,v->map.bs,&v->bstash);

#if defined(PETSC_HAVE_MATLAB)
  PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);
  PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);
#endif
  PetscObjectChangeTypeName((PetscObject)v,VECMPI);
  PetscPublishAll(v);
  return(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
M*/
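
/*
   Usage sketch for VECMPI (assumes PETSc has been initialized; the global
   length 100 is arbitrary and error checking is omitted):

     Vec v;
     VecCreate(PETSC_COMM_WORLD,&v);
     VecSetSizes(v,PETSC_DECIDE,100);
     VecSetType(v,VECMPI);
     (or VecSetFromOptions(v) together with the option -vec_type mpi)
*/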

PetscErrorCode  VecCreate_MPI(Vec vv)
{
  VecCreate_MPI_Private(vv,0,0);
  return(0);
}

/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.
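
   Example usage (a sketch; n is the local length and error checking is omitted):
.vb
       PetscScalar *array;
       Vec         v;
       PetscMalloc(n*sizeof(PetscScalar),&array);
       VecCreateMPIWithArray(PETSC_COMM_WORLD,n,PETSC_DECIDE,array,&v);
       ... use v ...
       VecDestroy(v);
       PetscFree(array);
.ve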

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  if (n == PETSC_DECIDE) {
    SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  }
  PetscSplitOwnership(comm,&n,&N);
  VecCreate(comm,vv);
  VecSetSizes(*vv,n,N);
  VecCreate_MPI_Private(*vv,0,array);
  return(0);
}

/*@
    VecGhostGetLocalForm - Obtains the local ghosted representation of
    a parallel vector created with VecCreateGhost().

    Not Collective

    Input Parameter:
.   g - the global vector. The vector must have been obtained with either
        VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().

    Output Parameter:
.   l - the local (ghosted) representation

    Notes:
    This routine does not actually update the ghost values, but rather it
    returns a sequential vector that includes the locations for the ghost
    values and their current values. The returned vector and the original
    vector passed in share the same array that contains the actual vector data.

    One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
    finished using the object.
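
    Example usage (a sketch; g is a vector from VecCreateGhost() and error
    checking is omitted):
.vb
       Vec         l;
       PetscScalar *a;
       VecGhostGetLocalForm(g,&l);
       VecGetArray(l,&a);
       ... a[] covers the owned entries followed by the ghost entries ...
       VecRestoreArray(l,&a);
       VecGhostRestoreLocalForm(g,&l);
.ve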

    Level: advanced

    Concepts: vectors^ghost point access

.seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
PetscErrorCode  VecGhostGetLocalForm(Vec g,Vec *l)
{
  PetscTruth     isseq,ismpi;

  PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);
  PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);
  if (ismpi) {
    Vec_MPI *v = (Vec_MPI*)g->data;
    if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
    *l = v->localrep;
  } else if (isseq) {
    *l = g;
  } else {
    SETERRQ1(PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",g->type_name);
  }
  PetscObjectReference((PetscObject)*l);
  return(0);
}

/*@
    VecGhostRestoreLocalForm - Restores the local ghosted representation of
    a parallel vector obtained with VecGhostGetLocalForm().

    Not Collective

    Input Parameters:
+   g - the global vector
-   l - the local (ghosted) representation

    Notes:
    This routine does not update the ghost values; it simply releases the
    local form obtained with VecGhostGetLocalForm() by decrementing its
    reference count.

    Level: advanced

.seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
@*/
PetscErrorCode  VecGhostRestoreLocalForm(Vec g,Vec *l)
{
  PetscObjectDereference((PetscObject)*l);
  return(0);
}

/*@
   VecGhostUpdateBegin - Begins the vector scatter to update the vector from
   local representation to global or global representation to local.

   Collective on Vec

   Input Parameters:
+  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
PetscErrorCode  VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI        *v;

  v = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) return(0);

  if (scattermode == SCATTER_REVERSE) {
    VecScatterBegin(v->localrep,g,insertmode,scattermode,v->localupdate);
  } else {
    VecScatterBegin(g,v->localrep,insertmode,scattermode,v->localupdate);
  }
  return(0);
}

/*@
   VecGhostUpdateEnd - Ends the vector scatter to update the vector from
   local representation to global or global representation to local.

   Collective on Vec

   Input Parameters:
+  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
PetscErrorCode  VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI        *v;

  v = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) return(0);

  if (scattermode == SCATTER_REVERSE) {
    VecScatterEnd(v->localrep,g,insertmode,scattermode,v->localupdate);
  } else {
    VecScatterEnd(g,v->localrep,insertmode,scattermode,v->localupdate);
  }
  return(0);
}

/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
-  array - the space to store the vector values (as long as n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  PetscSplitOwnership(comm,&n,&N);
  /* Create global representation */
  VecCreate(comm,vv);
  VecSetSizes(*vv,n,N);
  VecCreate_MPI_Private(*vv,nghost,array);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  VecGetArray(*vv,&larray);
  VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);
  PetscLogObjectParent(*vv,w->localrep);
  VecRestoreArray(*vv,&larray);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ISCreateGeneral(comm,nghost,ghosts,&from);
  ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);
  VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
  PetscLogObjectParent(*vv,w->localupdate);
  ISDestroy(to);
  ISDestroy(from);

  /* Set local-to-global mapping for the ghosted vector */
  PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);
  VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ISLocalToGlobalMappingCreate(comm,n+nghost,indices,&ltog);
  VecSetLocalToGlobalMapping(*vv,ltog);
  ISLocalToGlobalMappingDestroy(ltog);
  PetscFree(indices);  /* freed exactly once, after the mapping has been created */
  return(0);
}

/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.
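
   Example usage (a sketch; the two ghost indices are hypothetical and
   error checking is omitted):
.vb
       PetscInt ghosts[2] = {0,99};
       Vec      g;
       VecCreateGhost(PETSC_COMM_WORLD,n,PETSC_DECIDE,2,ghosts,&g);
       VecGhostUpdateBegin(g,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(g,INSERT_VALUES,SCATTER_FORWARD);
.ve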

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);
  return(0);
}

PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  VecCreate(win->comm,v);
  VecSetSizes(*v,win->map.n,win->map.N);
  VecCreate_MPI_Private(*v,w->nghost,0);
  vw   = (Vec_MPI *)(*v)->data;
  PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    VecGetArray(*v,&array);
    VecCreateSeqWithArray(PETSC_COMM_SELF,win->map.n+w->nghost,array,&vw->localrep);
    PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));
    VecRestoreArray(*v,&array);
    PetscLogObjectParent(*v,vw->localrep);
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      PetscObjectReference((PetscObject)vw->localupdate);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash = win->stash.donotstash;

  PetscOListDuplicate(win->olist,&(*v)->olist);
  PetscFListDuplicate(win->qlist,&(*v)->qlist);
  if (win->mapping) {
    (*v)->mapping = win->mapping;
    PetscObjectReference((PetscObject)win->mapping);
  }
  if (win->bmapping) {
    (*v)->bmapping = win->bmapping;
    PetscObjectReference((PetscObject)win->bmapping);
  }
  (*v)->map.bs    = win->map.bs;
  (*v)->bstash.bs = win->bstash.bs;

  return(0);
}

/* ------------------------------------------------------------------------------------------*/
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
-  array - the space to store the vector values (as long as n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost.
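
   For example, with bs=2, n=6 and nghost=1, each process stores its 6 owned
   entries followed by one ghost block of 2 entries, so the array must be at
   least n + nghost*bs = 8 entries long.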

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  Vec_MPI        *w;
  PetscScalar    *larray;
  IS             from,to;

  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  PetscSplitOwnership(comm,&n,&N);
  /* Create global representation */
  VecCreate(comm,vv);
  VecSetSizes(*vv,n,N);
  VecCreate_MPI_Private(*vv,nghost*bs,array);
  VecSetBlockSize(*vv,bs);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  VecGetArray(*vv,&larray);
  VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);
  VecSetBlockSize(w->localrep,bs);
  PetscLogObjectParent(*vv,w->localrep);
  VecRestoreArray(*vv,&larray);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ISCreateBlock(comm,bs,nghost,ghosts,&from);
  ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);
  VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
  PetscLogObjectParent(*vv,w->localupdate);
  ISDestroy(to);
  ISDestroy(from);

  return(0);
}

/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);
  return(0);
}

/*
    These routines introduce a ghosted vector where the ghosting is determined
  by the call to VecSetLocalToGlobalMapping(); a usage sketch follows.
*/
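
/*
   Usage sketch (illustrative only; it assumes the FETI type has been
   registered under a name such as "feti", which is an assumption, and
   omits error checking). The supplied mapping determines the ghosting:

     Vec v;
     VecCreate(PETSC_COMM_WORLD,&v);
     VecSetSizes(v,n,PETSC_DECIDE);
     VecSetType(v,"feti");
     VecSetLocalToGlobalMapping(v,map);      allocates the ghosted local space
     VecSetValuesLocal(v,nloc,idx,vals,ADD_VALUES);
*/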

PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
{
  Vec_MPI        *v = (Vec_MPI *)vv->data;

  v->nghost = map->n - vv->map.n;

  /* we need to enlarge the array space that was allocated when the vector was created */
  PetscFree(v->array_allocated);
  PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);
  v->array = v->array_allocated;

  /* Create local representation */
  VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);
  PetscLogObjectParent(vv,v->localrep);
  return(0);
}


PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
{
  Vec_MPI        *v = (Vec_MPI *)vv->data;

  VecSetValues(v->localrep,n,ix,values,mode);
  return(0);
}

PetscErrorCode  VecCreate_FETI(Vec vv)
{
  VecSetType(vv,VECMPI);

  /* overwrite the functions to handle setting values locally */
  vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
  vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
  vv->ops->assemblybegin           = 0;
  vv->ops->assemblyend             = 0;
  vv->ops->setvaluesblocked        = 0;

  return(0);
}