Actual source code: pdvec.c
petsc-3.11.4 2019-09-28
2: /*
3: Code for some of the parallel vector primitives.
4: */
5: #include <../src/vec/vec/impls/mpi/pvecimpl.h>
6: #include <petsc/private/glvisviewerimpl.h>
7: #include <petsc/private/glvisvecimpl.h>
8: #include <petscviewerhdf5.h>
10: PetscErrorCode VecDestroy_MPI(Vec v)
11: {
12: Vec_MPI *x = (Vec_MPI*)v->data;
16: #if defined(PETSC_USE_LOG)
17: PetscLogObjectState((PetscObject)v,"Length=%D",v->map->N);
18: #endif
19: if (!x) return(0);
20: PetscFree(x->array_allocated);
22: /* Destroy local representation of vector if it exists */
23: if (x->localrep) {
24: VecDestroy(&x->localrep);
25: VecScatterDestroy(&x->localupdate);
26: }
27: VecAssemblyReset_MPI(v);
29: /* Destroy the stashes: note the order - so that the tags are freed properly */
30: VecStashDestroy_Private(&v->bstash);
31: VecStashDestroy_Private(&v->stash);
32: PetscFree(v->data);
33: return(0);
34: }
36: PetscErrorCode VecView_MPI_ASCII(Vec xin,PetscViewer viewer)
37: {
38: PetscErrorCode ierr;
39: PetscInt i,work = xin->map->n,cnt,len,nLen;
40: PetscMPIInt j,n = 0,size,rank,tag = ((PetscObject)viewer)->tag;
41: MPI_Status status;
42: PetscScalar *values;
43: const PetscScalar *xarray;
44: const char *name;
45: PetscViewerFormat format;
48: MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
49: PetscViewerGetFormat(viewer,&format);
50: if (format == PETSC_VIEWER_LOAD_BALANCE) {
51: PetscInt nmax = 0,nmin = xin->map->n,navg;
52: for (i=0; i<(PetscInt)size; i++) {
53: nmax = PetscMax(nmax,xin->map->range[i+1] - xin->map->range[i]);
54: nmin = PetscMin(nmin,xin->map->range[i+1] - xin->map->range[i]);
55: }
56: navg = xin->map->N/size;
57: PetscViewerASCIIPrintf(viewer," Load Balance - Local vector size Min %D avg %D max %D\n",nmin,navg,nmax);
58: return(0);
59: }
61: VecGetArrayRead(xin,&xarray);
62: /* determine maximum message to arrive */
63: MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
64: MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,PetscObjectComm((PetscObject)xin));
65: if (format == PETSC_VIEWER_ASCII_GLVIS) { rank = 0, len = 0; } /* no parallel distributed write support from GLVis */
66: if (!rank) {
67: PetscMalloc1(len,&values);
68: /*
69: MATLAB format and ASCII format are very similar except
70: MATLAB uses %18.16e format while ASCII uses %g
71: */
72: if (format == PETSC_VIEWER_ASCII_MATLAB) {
73: PetscObjectGetName((PetscObject)xin,&name);
74: PetscViewerASCIIPrintf(viewer,"%s = [\n",name);
75: for (i=0; i<xin->map->n; i++) {
76: #if defined(PETSC_USE_COMPLEX)
77: if (PetscImaginaryPart(xarray[i]) > 0.0) {
78: PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
79: } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
80: PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",(double)PetscRealPart(xarray[i]),-(double)PetscImaginaryPart(xarray[i]));
81: } else {
82: PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)PetscRealPart(xarray[i]));
83: }
84: #else
85: PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)xarray[i]);
86: #endif
87: }
88: /* receive and print messages */
89: for (j=1; j<size; j++) {
90: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
91: MPI_Get_count(&status,MPIU_SCALAR,&n);
92: for (i=0; i<n; i++) {
93: #if defined(PETSC_USE_COMPLEX)
94: if (PetscImaginaryPart(values[i]) > 0.0) {
95: PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
96: } else if (PetscImaginaryPart(values[i]) < 0.0) {
97: PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",(double)PetscRealPart(values[i]),-(double)PetscImaginaryPart(values[i]));
98: } else {
99: PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)PetscRealPart(values[i]));
100: }
101: #else
102: PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)values[i]);
103: #endif
104: }
105: }
106: PetscViewerASCIIPrintf(viewer,"];\n");
108: } else if (format == PETSC_VIEWER_ASCII_SYMMODU) {
109: for (i=0; i<xin->map->n; i++) {
110: #if defined(PETSC_USE_COMPLEX)
111: PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
112: #else
113: PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)xarray[i]);
114: #endif
115: }
116: /* receive and print messages */
117: for (j=1; j<size; j++) {
118: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
119: MPI_Get_count(&status,MPIU_SCALAR,&n);
120: for (i=0; i<n; i++) {
121: #if defined(PETSC_USE_COMPLEX)
122: PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
123: #else
124: PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)values[i]);
125: #endif
126: }
127: }
128: } else if (format == PETSC_VIEWER_ASCII_VTK || format == PETSC_VIEWER_ASCII_VTK_CELL) {
129: /*
130: state 0: No header has been output
131: state 1: Only POINT_DATA has been output
132: state 2: Only CELL_DATA has been output
133: state 3: Output both, POINT_DATA last
134: state 4: Output both, CELL_DATA last
135: */
136: static PetscInt stateId = -1;
137: int outputState = 0;
138: int doOutput = 0;
139: PetscBool hasState;
140: PetscInt bs, b;
142: if (stateId < 0) {
143: PetscObjectComposedDataRegister(&stateId);
144: }
145: PetscObjectComposedDataGetInt((PetscObject) viewer, stateId, outputState, hasState);
146: if (!hasState) outputState = 0;
148: PetscObjectGetName((PetscObject)xin,&name);
149: VecGetLocalSize(xin, &nLen);
150: PetscMPIIntCast(nLen,&n);
151: VecGetBlockSize(xin, &bs);
152: if (format == PETSC_VIEWER_ASCII_VTK) {
153: if (outputState == 0) {
154: outputState = 1;
155: doOutput = 1;
156: } else if (outputState == 1) doOutput = 0;
157: else if (outputState == 2) {
158: outputState = 3;
159: doOutput = 1;
160: } else if (outputState == 3) doOutput = 0;
161: else if (outputState == 4) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Tried to output POINT_DATA again after intervening CELL_DATA");
163: if (doOutput) {
164: PetscViewerASCIIPrintf(viewer, "POINT_DATA %d\n", xin->map->N/bs);
165: }
166: } else {
167: if (outputState == 0) {
168: outputState = 2;
169: doOutput = 1;
170: } else if (outputState == 1) {
171: outputState = 4;
172: doOutput = 1;
173: } else if (outputState == 2) doOutput = 0;
174: else if (outputState == 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Tried to output CELL_DATA again after intervening POINT_DATA");
175: else if (outputState == 4) doOutput = 0;
177: if (doOutput) {
178: PetscViewerASCIIPrintf(viewer, "CELL_DATA %d\n", xin->map->N/bs);
179: }
180: }
181: PetscObjectComposedDataSetInt((PetscObject) viewer, stateId, outputState);
182: if (name) {
183: if (bs == 3) {
184: PetscViewerASCIIPrintf(viewer, "VECTORS %s double\n", name);
185: } else {
186: PetscViewerASCIIPrintf(viewer, "SCALARS %s double %d\n", name, bs);
187: }
188: } else {
189: PetscViewerASCIIPrintf(viewer, "SCALARS scalars double %d\n", bs);
190: }
191: if (bs != 3) {
192: PetscViewerASCIIPrintf(viewer, "LOOKUP_TABLE default\n");
193: }
194: for (i=0; i<n/bs; i++) {
195: for (b=0; b<bs; b++) {
196: if (b > 0) {
197: PetscViewerASCIIPrintf(viewer," ");
198: }
199: PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(xarray[i*bs+b]));
200: }
201: PetscViewerASCIIPrintf(viewer,"\n");
202: }
203: for (j=1; j<size; j++) {
204: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
205: MPI_Get_count(&status,MPIU_SCALAR,&n);
206: for (i=0; i<n/bs; i++) {
207: for (b=0; b<bs; b++) {
208: if (b > 0) {
209: PetscViewerASCIIPrintf(viewer," ");
210: }
211: PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(values[i*bs+b]));
212: }
213: PetscViewerASCIIPrintf(viewer,"\n");
214: }
215: }
216: } else if (format == PETSC_VIEWER_ASCII_VTK_COORDS) {
217: PetscInt bs, b;
219: VecGetLocalSize(xin, &nLen);
220: PetscMPIIntCast(nLen,&n);
221: VecGetBlockSize(xin, &bs);
222: if ((bs < 1) || (bs > 3)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "VTK can only handle 3D objects, but vector dimension is %d", bs);
224: for (i=0; i<n/bs; i++) {
225: for (b=0; b<bs; b++) {
226: if (b > 0) {
227: PetscViewerASCIIPrintf(viewer," ");
228: }
229: PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(xarray[i*bs+b]));
230: }
231: for (b=bs; b<3; b++) {
232: PetscViewerASCIIPrintf(viewer," 0.0");
233: }
234: PetscViewerASCIIPrintf(viewer,"\n");
235: }
236: for (j=1; j<size; j++) {
237: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
238: MPI_Get_count(&status,MPIU_SCALAR,&n);
239: for (i=0; i<n/bs; i++) {
240: for (b=0; b<bs; b++) {
241: if (b > 0) {
242: PetscViewerASCIIPrintf(viewer," ");
243: }
244: PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(values[i*bs+b]));
245: }
246: for (b=bs; b<3; b++) {
247: PetscViewerASCIIPrintf(viewer," 0.0");
248: }
249: PetscViewerASCIIPrintf(viewer,"\n");
250: }
251: }
252: } else if (format == PETSC_VIEWER_ASCII_PCICE) {
253: PetscInt bs, b, vertexCount = 1;
255: VecGetLocalSize(xin, &nLen);
256: PetscMPIIntCast(nLen,&n);
257: VecGetBlockSize(xin, &bs);
258: if ((bs < 1) || (bs > 3)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "PCICE can only handle up to 3D objects, but vector dimension is %d", bs);
260: PetscViewerASCIIPrintf(viewer,"%D\n", xin->map->N/bs);
261: for (i=0; i<n/bs; i++) {
262: PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
263: for (b=0; b<bs; b++) {
264: if (b > 0) {
265: PetscViewerASCIIPrintf(viewer," ");
266: }
267: #if !defined(PETSC_USE_COMPLEX)
268: PetscViewerASCIIPrintf(viewer,"% 12.5E",(double)xarray[i*bs+b]);
269: #endif
270: }
271: PetscViewerASCIIPrintf(viewer,"\n");
272: }
273: for (j=1; j<size; j++) {
274: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
275: MPI_Get_count(&status,MPIU_SCALAR,&n);
276: for (i=0; i<n/bs; i++) {
277: PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
278: for (b=0; b<bs; b++) {
279: if (b > 0) {
280: PetscViewerASCIIPrintf(viewer," ");
281: }
282: #if !defined(PETSC_USE_COMPLEX)
283: PetscViewerASCIIPrintf(viewer,"% 12.5E",(double)values[i*bs+b]);
284: #endif
285: }
286: PetscViewerASCIIPrintf(viewer,"\n");
287: }
288: }
289: } else if (format == PETSC_VIEWER_ASCII_GLVIS) {
290: /* GLVis ASCII visualization/dump: this function mimics mfem::GridFunction::Save() */
291: const PetscScalar *array;
292: PetscInt i,n,vdim, ordering = 1; /* mfem::FiniteElementSpace::Ordering::byVDIM */
293: PetscContainer glvis_container;
294: PetscViewerGLVisVecInfo glvis_vec_info;
295: PetscViewerGLVisInfo glvis_info;
296: PetscErrorCode ierr;
298: /* mfem::FiniteElementSpace::Save() */
299: VecGetBlockSize(xin,&vdim);
300: PetscViewerASCIIPrintf(viewer,"FiniteElementSpace\n");
301: PetscObjectQuery((PetscObject)xin,"_glvis_info_container",(PetscObject*)&glvis_container);
302: if (!glvis_container) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_PLIB,"Missing GLVis container");
303: PetscContainerGetPointer(glvis_container,(void**)&glvis_vec_info);
304: PetscViewerASCIIPrintf(viewer,"%s\n",glvis_vec_info->fec_type);
305: PetscViewerASCIIPrintf(viewer,"VDim: %d\n",vdim);
306: PetscViewerASCIIPrintf(viewer,"Ordering: %d\n",ordering);
307: PetscViewerASCIIPrintf(viewer,"\n");
308: /* mfem::Vector::Print() */
309: PetscObjectQuery((PetscObject)viewer,"_glvis_info_container",(PetscObject*)&glvis_container);
310: if (!glvis_container) SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_PLIB,"Missing GLVis container");
311: PetscContainerGetPointer(glvis_container,(void**)&glvis_info);
312: if (glvis_info->enabled) {
313: VecGetLocalSize(xin,&n);
314: VecGetArrayRead(xin,&array);
315: for (i=0;i<n;i++) {
316: PetscViewerASCIIPrintf(viewer,glvis_info->fmt,(double)PetscRealPart(array[i]));
317: PetscViewerASCIIPrintf(viewer,"\n");
318: }
319: VecRestoreArrayRead(xin,&array);
320: }
321: } else if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
322: /* No info */
323: } else {
324: if (format != PETSC_VIEWER_ASCII_COMMON) {PetscViewerASCIIPrintf(viewer,"Process [%d]\n",rank);}
325: cnt = 0;
326: for (i=0; i<xin->map->n; i++) {
327: if (format == PETSC_VIEWER_ASCII_INDEX) {
328: PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
329: }
330: #if defined(PETSC_USE_COMPLEX)
331: if (PetscImaginaryPart(xarray[i]) > 0.0) {
332: PetscViewerASCIIPrintf(viewer,"%g + %g i\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
333: } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
334: PetscViewerASCIIPrintf(viewer,"%g - %g i\n",(double)PetscRealPart(xarray[i]),-(double)PetscImaginaryPart(xarray[i]));
335: } else {
336: PetscViewerASCIIPrintf(viewer,"%g\n",(double)PetscRealPart(xarray[i]));
337: }
338: #else
339: PetscViewerASCIIPrintf(viewer,"%g\n",(double)xarray[i]);
340: #endif
341: }
342: /* receive and print messages */
343: for (j=1; j<size; j++) {
344: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
345: MPI_Get_count(&status,MPIU_SCALAR,&n);
346: if (format != PETSC_VIEWER_ASCII_COMMON) {
347: PetscViewerASCIIPrintf(viewer,"Process [%d]\n",j);
348: }
349: for (i=0; i<n; i++) {
350: if (format == PETSC_VIEWER_ASCII_INDEX) {
351: PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
352: }
353: #if defined(PETSC_USE_COMPLEX)
354: if (PetscImaginaryPart(values[i]) > 0.0) {
355: PetscViewerASCIIPrintf(viewer,"%g + %g i\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
356: } else if (PetscImaginaryPart(values[i]) < 0.0) {
357: PetscViewerASCIIPrintf(viewer,"%g - %g i\n",(double)PetscRealPart(values[i]),-(double)PetscImaginaryPart(values[i]));
358: } else {
359: PetscViewerASCIIPrintf(viewer,"%g\n",(double)PetscRealPart(values[i]));
360: }
361: #else
362: PetscViewerASCIIPrintf(viewer,"%g\n",(double)values[i]);
363: #endif
364: }
365: }
366: }
367: PetscFree(values);
368: } else {
369: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
370: /* Rank 0 is not trying to receive anything, so don't send anything */
371: } else {
372: if (format == PETSC_VIEWER_ASCII_MATLAB || format == PETSC_VIEWER_ASCII_VTK || format == PETSC_VIEWER_ASCII_VTK_CELL) {
373: /* this may be a collective operation so make sure everyone calls it */
374: PetscObjectGetName((PetscObject)xin,&name);
375: }
376: MPI_Send((void*)xarray,xin->map->n,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)xin));
377: }
378: }
379: PetscViewerFlush(viewer);
380: VecRestoreArrayRead(xin,&xarray);
381: return(0);
382: }
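/*
   Illustrative usage sketch (not part of pdvec.c): viewing a parallel vector with an
   ASCII viewer in MATLAB format, which exercises the PETSC_VIEWER_ASCII_MATLAB branch
   of VecView_MPI_ASCII above.  The file name "x.m", the vector x, and its object name
   are assumptions made for this example.
*/
#include <petscvec.h>
PetscErrorCode ViewVecMatlabASCII(Vec x)
{
  PetscErrorCode ierr;
  PetscViewer    viewer;

  PetscFunctionBegin;
  ierr = PetscViewerASCIIOpen(PETSC_COMM_WORLD,"x.m",&viewer);CHKERRQ(ierr);
  ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_MATLAB);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject)x,"x");CHKERRQ(ierr); /* becomes the MATLAB variable name */
  ierr = VecView(x,viewer);CHKERRQ(ierr);                      /* rank 0 gathers and prints; other ranks send */
  ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}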
384: PetscErrorCode VecView_MPI_Binary(Vec xin,PetscViewer viewer)
385: {
386: PetscErrorCode ierr;
387: PetscMPIInt rank,size,mesgsize,tag = ((PetscObject)viewer)->tag, mesglen;
388: PetscInt len,n,j,tr[2];
389: int fdes;
390: MPI_Status status;
391: PetscScalar *values;
392: const PetscScalar *xarray;
393: FILE *file;
394: #if defined(PETSC_HAVE_MPIIO)
395: PetscBool isMPIIO;
396: #endif
397: PetscBool skipHeader;
398: PetscInt message_count,flowcontrolcount;
399: PetscViewerFormat format;
402: VecGetArrayRead(xin,&xarray);
403: PetscViewerBinaryGetDescriptor(viewer,&fdes);
404: PetscViewerBinaryGetSkipHeader(viewer,&skipHeader);
406: /* determine maximum message to arrive */
407: MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
408: MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
410: if (!skipHeader) {
411: tr[0] = VEC_FILE_CLASSID;
412: tr[1] = xin->map->N;
413: PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT,PETSC_FALSE);
414: }
416: #if defined(PETSC_HAVE_MPIIO)
417: PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO);
418: if (!isMPIIO) {
419: #endif
420: PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);
421: if (!rank) {
422: PetscBinaryWrite(fdes,(void*)xarray,xin->map->n,PETSC_SCALAR,PETSC_FALSE);
424: len = 0;
425: for (j=1; j<size; j++) len = PetscMax(len,xin->map->range[j+1]-xin->map->range[j]);
426: PetscMalloc1(len,&values);
427: PetscMPIIntCast(len,&mesgsize);
428: /* receive and save messages */
429: for (j=1; j<size; j++) {
430: PetscViewerFlowControlStepMaster(viewer,j,&message_count,flowcontrolcount);
431: MPI_Recv(values,mesgsize,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
432: MPI_Get_count(&status,MPIU_SCALAR,&mesglen);
433: n = (PetscInt)mesglen;
434: PetscBinaryWrite(fdes,values,n,PETSC_SCALAR,PETSC_TRUE);
435: }
436: PetscViewerFlowControlEndMaster(viewer,&message_count);
437: PetscFree(values);
438: } else {
439: PetscViewerFlowControlStepWorker(viewer,rank,&message_count);
440: PetscMPIIntCast(xin->map->n,&mesgsize);
441: MPI_Send((void*)xarray,mesgsize,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)xin));
442: PetscViewerFlowControlEndWorker(viewer,&message_count);
443: }
444: PetscViewerGetFormat(viewer,&format);
445: if (format == PETSC_VIEWER_BINARY_MATLAB) {
446: MPI_Comm comm;
447: FILE *info;
448: const char *name;
450: PetscObjectGetName((PetscObject)xin,&name);
451: PetscObjectGetComm((PetscObject)viewer,&comm);
452: PetscViewerBinaryGetInfoPointer(viewer,&info);
453: PetscFPrintf(comm,info,"#--- begin code written by PetscViewerBinary for MATLAB format ---#\n");
454: PetscFPrintf(comm,info,"#$$ Set.%s = PetscBinaryRead(fd);\n",name);
455: PetscFPrintf(comm,info,"#--- end code written by PetscViewerBinary for MATLAB format ---#\n\n");
456: }
457: #if defined(PETSC_HAVE_MPIIO)
458: } else {
459: MPI_Offset off;
460: MPI_File mfdes;
461: PetscMPIInt lsize;
463: PetscMPIIntCast(xin->map->n,&lsize);
464: PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
465: PetscViewerBinaryGetMPIIOOffset(viewer,&off);
466: off += xin->map->rstart*sizeof(PetscScalar); /* off is MPI_Offset, not PetscMPIInt */
467: MPI_File_set_view(mfdes,off,MPIU_SCALAR,MPIU_SCALAR,(char*)"native",MPI_INFO_NULL);
468: MPIU_File_write_all(mfdes,(void*)xarray,lsize,MPIU_SCALAR,MPI_STATUS_IGNORE);
469: PetscViewerBinaryAddMPIIOOffset(viewer,xin->map->N*sizeof(PetscScalar));
470: }
471: #endif
473: VecRestoreArrayRead(xin,&xarray);
474: if (!rank) {
475: PetscViewerBinaryGetInfoPointer(viewer,&file);
476: if (file) {
477: if (((PetscObject)xin)->prefix) {
478: PetscFPrintf(PETSC_COMM_SELF,file,"-%svecload_block_size %D\n",((PetscObject)xin)->prefix,PetscAbs(xin->map->bs));
479: } else {
480: PetscFPrintf(PETSC_COMM_SELF,file,"-vecload_block_size %D\n",PetscAbs(xin->map->bs));
481: }
482: }
483: }
484: return(0);
485: }
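/*
   Illustrative usage sketch (not part of pdvec.c): writing a parallel vector with the
   binary viewer (which lands in VecView_MPI_Binary above) and reading it back with
   VecLoad.  The file name "vec.dat" and the vectors x and y are assumptions made for
   this example.
*/
#include <petscvec.h>
PetscErrorCode DumpAndReloadVec(Vec x,Vec y)
{
  PetscErrorCode ierr;
  PetscViewer    viewer;

  PetscFunctionBegin;
  ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vec.dat",FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
  ierr = VecView(x,viewer);CHKERRQ(ierr);                  /* rank 0 writes, or MPI-IO if enabled */
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);

  ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vec.dat",FILE_MODE_READ,&viewer);CHKERRQ(ierr);
  ierr = VecLoad(y,viewer);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}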
487: #include <petscdraw.h>
488: PetscErrorCode VecView_MPI_Draw_LG(Vec xin,PetscViewer viewer)
489: {
490: PetscDraw draw;
491: PetscBool isnull;
492: PetscDrawLG lg;
493: PetscMPIInt i,size,rank,n,N,*lens = NULL,*disp = NULL;
494: PetscReal *values, *xx = NULL,*yy = NULL;
495: const PetscScalar *xarray;
496: int colors[] = {PETSC_DRAW_RED};
497: PetscErrorCode ierr;
500: PetscViewerDrawGetDraw(viewer,0,&draw);
501: PetscDrawIsNull(draw,&isnull);
502: if (isnull) return(0);
503: MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
504: MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
505: PetscMPIIntCast(xin->map->n,&n);
506: PetscMPIIntCast(xin->map->N,&N);
508: VecGetArrayRead(xin,&xarray);
509: #if defined(PETSC_USE_COMPLEX)
510: PetscMalloc1(n+1,&values);
511: for (i=0; i<n; i++) values[i] = PetscRealPart(xarray[i]);
512: #else
513: values = (PetscReal*)xarray;
514: #endif
515: if (!rank) {
516: PetscMalloc2(N,&xx,N,&yy);
517: for (i=0; i<N; i++) xx[i] = (PetscReal)i;
518: PetscMalloc2(size,&lens,size,&disp);
519: for (i=0; i<size; i++) lens[i] = (PetscMPIInt)xin->map->range[i+1] - (PetscMPIInt)xin->map->range[i];
520: for (i=0; i<size; i++) disp[i] = (PetscMPIInt)xin->map->range[i];
521: }
522: MPI_Gatherv(values,n,MPIU_REAL,yy,lens,disp,MPIU_REAL,0,PetscObjectComm((PetscObject)xin));
523: PetscFree2(lens,disp);
524: #if defined(PETSC_USE_COMPLEX)
525: PetscFree(values);
526: #endif
527: VecRestoreArrayRead(xin,&xarray);
529: PetscViewerDrawGetDrawLG(viewer,0,&lg);
530: PetscDrawLGReset(lg);
531: PetscDrawLGSetDimension(lg,1);
532: PetscDrawLGSetColors(lg,colors);
533: if (!rank) {
534: PetscDrawLGAddPoints(lg,N,&xx,&yy);
535: PetscFree2(xx,yy);
536: }
537: PetscDrawLGDraw(lg);
538: PetscDrawLGSave(lg);
539: return(0);
540: }
542: PetscErrorCode VecView_MPI_Draw(Vec xin,PetscViewer viewer)
543: {
544: PetscErrorCode ierr;
545: PetscMPIInt rank,size,tag = ((PetscObject)viewer)->tag;
546: PetscInt i,start,end;
547: MPI_Status status;
548: PetscReal min,max,tmp = 0.0;
549: PetscDraw draw;
550: PetscBool isnull;
551: PetscDrawAxis axis;
552: const PetscScalar *xarray;
555: PetscViewerDrawGetDraw(viewer,0,&draw);
556: PetscDrawIsNull(draw,&isnull);
557: if (isnull) return(0);
558: MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
559: MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
561: VecMin(xin,NULL,&min);
562: VecMax(xin,NULL,&max);
563: if (min == max) {
564: min -= 1.e-5;
565: max += 1.e-5;
566: }
568: PetscDrawCheckResizedWindow(draw);
569: PetscDrawClear(draw);
571: PetscDrawAxisCreate(draw,&axis);
572: PetscDrawAxisSetLimits(axis,0.0,(PetscReal)xin->map->N,min,max);
573: PetscDrawAxisDraw(axis);
574: PetscDrawAxisDestroy(&axis);
576: /* draw local part of vector */
577: VecGetArrayRead(xin,&xarray);
578: VecGetOwnershipRange(xin,&start,&end);
579: if (rank < size-1) { /* send value to right */
580: MPI_Send((void*)&xarray[xin->map->n-1],1,MPIU_REAL,rank+1,tag,PetscObjectComm((PetscObject)xin));
581: }
582: if (rank) { /* receive value from right */
583: MPI_Recv(&tmp,1,MPIU_REAL,rank-1,tag,PetscObjectComm((PetscObject)xin),&status);
584: }
585: PetscDrawCollectiveBegin(draw);
586: if (rank) {
587: PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,PetscRealPart(xarray[0]),PETSC_DRAW_RED);
588: }
589: for (i=1; i<xin->map->n; i++) {
590: PetscDrawLine(draw,(PetscReal)(i-1+start),PetscRealPart(xarray[i-1]),(PetscReal)(i+start),PetscRealPart(xarray[i]),PETSC_DRAW_RED);
591: }
592: PetscDrawCollectiveEnd(draw);
593: VecRestoreArrayRead(xin,&xarray);
595: PetscDrawFlush(draw);
596: PetscDrawPause(draw);
597: PetscDrawSave(draw);
598: return(0);
599: }
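/*
   Illustrative usage sketch (not part of pdvec.c): plotting a parallel vector with a
   draw viewer.  Pushing PETSC_VIEWER_DRAW_LG selects the line-graph path
   (VecView_MPI_Draw_LG); otherwise the default plot in VecView_MPI_Draw above is used.
   The window title and size are assumptions made for this example.
*/
#include <petscvec.h>
#include <petscdraw.h>
PetscErrorCode PlotVec(Vec x)
{
  PetscErrorCode ierr;
  PetscViewer    viewer;

  PetscFunctionBegin;
  ierr = PetscViewerDrawOpen(PETSC_COMM_WORLD,NULL,"Vector entries",PETSC_DECIDE,PETSC_DECIDE,600,400,&viewer);CHKERRQ(ierr);
  ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_DRAW_LG);CHKERRQ(ierr);
  ierr = VecView(x,viewer);CHKERRQ(ierr);
  ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}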
601: #if defined(PETSC_HAVE_MATLAB_ENGINE)
602: PetscErrorCode VecView_MPI_Matlab(Vec xin,PetscViewer viewer)
603: {
604: PetscErrorCode ierr;
605: PetscMPIInt rank,size,*lens;
606: PetscInt i,N = xin->map->N;
607: const PetscScalar *xarray;
608: PetscScalar *xx;
611: VecGetArrayRead(xin,&xarray);
612: MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
613: MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
614: if (!rank) {
615: PetscMalloc1(N,&xx);
616: PetscMalloc1(size,&lens);
617: for (i=0; i<size; i++) lens[i] = xin->map->range[i+1] - xin->map->range[i];
619: MPI_Gatherv((void*)xarray,xin->map->n,MPIU_SCALAR,xx,lens,xin->map->range,MPIU_SCALAR,0,PetscObjectComm((PetscObject)xin));
620: PetscFree(lens);
622: PetscObjectName((PetscObject)xin);
623: PetscViewerMatlabPutArray(viewer,N,1,xx,((PetscObject)xin)->name);
625: PetscFree(xx);
626: } else {
627: MPI_Gatherv((void*)xarray,xin->map->n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,PetscObjectComm((PetscObject)xin));
628: }
629: VecRestoreArrayRead(xin,&xarray);
630: return(0);
631: }
632: #endif
634: #if defined(PETSC_HAVE_ADIOS)
635: #include <adios.h>
636: #include <adios_read.h>
637: #include <petsc/private/vieweradiosimpl.h>
638: #include <petsc/private/viewerimpl.h>
640: PetscErrorCode VecView_MPI_ADIOS(Vec xin, PetscViewer viewer)
641: {
642: PetscViewer_ADIOS *adios = (PetscViewer_ADIOS*)viewer->data;
643: PetscErrorCode ierr;
644: const char *vecname;
645: int64_t id;
646: PetscInt n,N,rstart;
647: const PetscScalar *array;
648: char nglobalname[16],nlocalname[16],coffset[16];
651: PetscObjectGetName((PetscObject) xin, &vecname);
653: VecGetLocalSize(xin,&n);
654: VecGetSize(xin,&N);
655: VecGetOwnershipRange(xin,&rstart,NULL);
657: sprintf(nlocalname,"%d",(int)n);
658: sprintf(nglobalname,"%d",(int)N);
659: sprintf(coffset,"%d",(int)rstart);
660: id = adios_define_var(Petsc_adios_group,vecname,"",adios_double,nlocalname,nglobalname,coffset);
661: VecGetArrayRead(xin,&array);
662: adios_write_byid(adios->adios_handle,id,array);
663: VecRestoreArrayRead(xin,&array);
665: return(0);
666: }
667: #endif
669: #if defined(PETSC_HAVE_ADIOS2)
670: #include <adios2_c.h>
671: #include <petsc/private/vieweradios2impl.h>
672: #include <petsc/private/viewerimpl.h>
674: PetscErrorCode VecView_MPI_ADIOS2(Vec xin, PetscViewer viewer)
675: {
676: PetscErrorCode ierr;
677: PetscViewer_ADIOS2 *adios2 = (PetscViewer_ADIOS2*)viewer->data;
678: PetscInt n,N,rstart;
679: const char *vecname;
680: const PetscScalar *array;
683: PetscObjectGetName((PetscObject) xin, &vecname);
684: VecGetLocalSize(xin,&n);
685: VecGetSize(xin,&N);
686: VecGetOwnershipRange(xin,&rstart,NULL);
688: VecGetArrayRead(xin,&array);
689: VecRestoreArrayRead(xin,&array);
690: return(0);
691: }
692: #endif
694: #if defined(PETSC_HAVE_HDF5)
695: PetscErrorCode VecView_MPI_HDF5(Vec xin, PetscViewer viewer)
696: {
697: /* TODO: It looks like we can remove the H5Sclose(filespace) and H5Dget_space(dset_id). Why do we do this? */
698: hid_t filespace; /* file dataspace identifier */
699: hid_t chunkspace; /* chunk dataset property identifier */
700: hid_t plist_id; /* property list identifier */
701: hid_t dset_id; /* dataset identifier */
702: hid_t memspace; /* memory dataspace identifier */
703: hid_t file_id;
704: hid_t group;
705: hid_t memscalartype; /* scalar type for mem (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
706: hid_t filescalartype; /* scalar type for file (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
707: PetscInt bs = PetscAbs(xin->map->bs);
708: hsize_t dim;
709: hsize_t maxDims[4], dims[4], chunkDims[4], count[4],offset[4];
710: PetscInt timestep;
711: PetscInt low;
712: PetscInt64 chunksize;
713: const PetscScalar *x;
714: const char *vecname;
715: PetscErrorCode ierr;
716: PetscBool dim2;
717: PetscBool spoutput;
720: PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
721: PetscViewerHDF5GetTimestep(viewer, &timestep);
722: PetscViewerHDF5GetBaseDimension2(viewer,&dim2);
723: PetscViewerHDF5GetSPOutput(viewer,&spoutput);
725: /* Create the dataspace for the dataset.
726: *
727: * dims - holds the current dimensions of the dataset
728: *
729: * maxDims - holds the maximum dimensions of the dataset (unlimited
730: * for the number of time steps with the current dimensions for the
731: * other dimensions; so only additional time steps can be added).
732: *
733: * chunkDims - holds the size of a single time step (required to
734: * permit extending dataset).
735: */
736: dim = 0;
737: chunksize = 1;
738: if (timestep >= 0) {
739: dims[dim] = timestep+1;
740: maxDims[dim] = H5S_UNLIMITED;
741: chunkDims[dim] = 1;
742: ++dim;
743: }
744: PetscHDF5IntCast(xin->map->N/bs,dims + dim);
746: maxDims[dim] = dims[dim];
747: chunkDims[dim] = dims[dim];
748: chunksize *= chunkDims[dim];
749: ++dim;
750: if (bs > 1 || dim2) {
751: dims[dim] = bs;
752: maxDims[dim] = dims[dim];
753: chunkDims[dim] = dims[dim];
754: chunksize *= chunkDims[dim];
755: ++dim;
756: }
757: #if defined(PETSC_USE_COMPLEX)
758: dims[dim] = 2;
759: maxDims[dim] = dims[dim];
760: chunkDims[dim] = dims[dim];
761: chunksize *= chunkDims[dim];
762: /* hdf5 chunks must be less than the max of 32 bit int */
763: if (chunksize > PETSC_HDF5_INT_MAX/64 ) {
764: if (bs > 1 || dim2) {
765: if (chunkDims[dim-2] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/128))) {
766: chunkDims[dim-2] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/128));
767: } if (chunkDims[dim-1] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/128))) {
768: chunkDims[dim-1] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/128));
769: }
770: } else {
771: chunkDims[dim-1] = PETSC_HDF5_INT_MAX/128;
772: }
773: }
774: ++dim;
775: #else
776: /* hdf5 chunks must be less than the max of 32 bit int */
777: if (chunksize > PETSC_HDF5_INT_MAX/64) {
778: if (bs > 1 || dim2) {
779: if (chunkDims[dim-2] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/64))) {
780: chunkDims[dim-2] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/64));
781: } if (chunkDims[dim-1] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/64))) {
782: chunkDims[dim-1] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/64));
783: }
784: } else {
785: chunkDims[dim-1] = PETSC_HDF5_INT_MAX/64;
786: }
787: }
788: #endif
791: PetscStackCallHDF5Return(filespace,H5Screate_simple,(dim, dims, maxDims));
793: #if defined(PETSC_USE_REAL_SINGLE)
794: memscalartype = H5T_NATIVE_FLOAT;
795: filescalartype = H5T_NATIVE_FLOAT;
796: #elif defined(PETSC_USE_REAL___FLOAT128)
797: #error "HDF5 output with 128 bit floats not supported."
798: #elif defined(PETSC_USE_REAL___FP16)
799: #error "HDF5 output with 16 bit floats not supported."
800: #else
801: memscalartype = H5T_NATIVE_DOUBLE;
802: if (spoutput == PETSC_TRUE) filescalartype = H5T_NATIVE_FLOAT;
803: else filescalartype = H5T_NATIVE_DOUBLE;
804: #endif
806: /* Create the dataset with default properties and close filespace */
807: PetscObjectGetName((PetscObject) xin, &vecname);
808: if (!H5Lexists(group, vecname, H5P_DEFAULT)) {
809: /* Create chunk */
810: PetscStackCallHDF5Return(chunkspace,H5Pcreate,(H5P_DATASET_CREATE));
811: PetscStackCallHDF5(H5Pset_chunk,(chunkspace, dim, chunkDims));
813: PetscStackCallHDF5Return(dset_id,H5Dcreate2,(group, vecname, filescalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT));
814: PetscStackCallHDF5(H5Pclose,(chunkspace));
815: } else {
816: PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));
817: PetscStackCallHDF5(H5Dset_extent,(dset_id, dims));
818: }
819: PetscStackCallHDF5(H5Sclose,(filespace));
821: /* Each process defines a dataset and writes it to the hyperslab in the file */
822: dim = 0;
823: if (timestep >= 0) {
824: count[dim] = 1;
825: ++dim;
826: }
827: PetscHDF5IntCast(xin->map->n/bs,count + dim);
828: ++dim;
829: if (bs > 1 || dim2) {
830: count[dim] = bs;
831: ++dim;
832: }
833: #if defined(PETSC_USE_COMPLEX)
834: count[dim] = 2;
835: ++dim;
836: #endif
837: if (xin->map->n > 0) {
838: PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
839: } else {
840: /* Can't create dataspace with zero for any dimension, so create null dataspace. */
841: PetscStackCallHDF5Return(memspace,H5Screate,(H5S_NULL));
842: }
844: /* Select hyperslab in the file */
845: VecGetOwnershipRange(xin, &low, NULL);
846: dim = 0;
847: if (timestep >= 0) {
848: offset[dim] = timestep;
849: ++dim;
850: }
851: PetscHDF5IntCast(low/bs,offset + dim);
852: ++dim;
853: if (bs > 1 || dim2) {
854: offset[dim] = 0;
855: ++dim;
856: }
857: #if defined(PETSC_USE_COMPLEX)
858: offset[dim] = 0;
859: ++dim;
860: #endif
861: if (xin->map->n > 0) {
862: PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
863: PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));
864: } else {
865: /* Create null filespace to match null memspace. */
866: PetscStackCallHDF5Return(filespace,H5Screate,(H5S_NULL));
867: }
869: /* Create property list for collective dataset write */
870: PetscStackCallHDF5Return(plist_id,H5Pcreate,(H5P_DATASET_XFER));
871: #if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
872: PetscStackCallHDF5(H5Pset_dxpl_mpio,(plist_id, H5FD_MPIO_COLLECTIVE));
873: #endif
874: /* To write dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */
876: VecGetArrayRead(xin, &x);
877: PetscStackCallHDF5(H5Dwrite,(dset_id, memscalartype, memspace, filespace, plist_id, x));
878: PetscStackCallHDF5(H5Fflush,(file_id, H5F_SCOPE_GLOBAL));
879: VecRestoreArrayRead(xin, &x);
881: /* Close/release resources */
882: PetscStackCallHDF5(H5Gclose,(group));
883: PetscStackCallHDF5(H5Pclose,(plist_id));
884: PetscStackCallHDF5(H5Sclose,(filespace));
885: PetscStackCallHDF5(H5Sclose,(memspace));
886: PetscStackCallHDF5(H5Dclose,(dset_id));
888: #if defined(PETSC_USE_COMPLEX)
889: {
890: PetscBool tru = PETSC_TRUE;
891: PetscViewerHDF5WriteObjectAttribute(viewer,(PetscObject)xin,"complex",PETSC_BOOL,&tru);
892: }
893: #endif
894: PetscInfo1(xin,"Wrote Vec object with name %s\n",vecname);
895: return(0);
896: }
897: #endif
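/*
   Illustrative usage sketch (not part of pdvec.c): writing a parallel vector to an
   HDF5 file, which drives VecView_MPI_HDF5 above.  The file name "sol.h5" and the
   dataset name "pressure" are assumptions made for this example; the object name
   becomes the HDF5 dataset name.
*/
#if defined(PETSC_HAVE_HDF5)
#include <petscvec.h>
#include <petscviewerhdf5.h>
PetscErrorCode DumpVecHDF5(Vec x)
{
  PetscErrorCode ierr;
  PetscViewer    viewer;

  PetscFunctionBegin;
  ierr = PetscViewerHDF5Open(PETSC_COMM_WORLD,"sol.h5",FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject)x,"pressure");CHKERRQ(ierr);
  ierr = VecView(x,viewer);CHKERRQ(ierr);   /* each rank writes its own hyperslab of the dataset */
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif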
899: PETSC_EXTERN PetscErrorCode VecView_MPI(Vec xin,PetscViewer viewer)
900: {
902: PetscBool iascii,isbinary,isdraw;
903: #if defined(PETSC_HAVE_MATHEMATICA)
904: PetscBool ismathematica;
905: #endif
906: #if defined(PETSC_HAVE_HDF5)
907: PetscBool ishdf5;
908: #endif
909: #if defined(PETSC_HAVE_MATLAB_ENGINE)
910: PetscBool ismatlab;
911: #endif
912: #if defined(PETSC_HAVE_ADIOS)
913: PetscBool isadios;
914: #endif
915: #if defined(PETSC_HAVE_ADIOS2)
916: PetscBool isadios2;
917: #endif
918: PetscBool isglvis;
921: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
922: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
923: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
924: #if defined(PETSC_HAVE_MATHEMATICA)
925: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATHEMATICA,&ismathematica);
926: #endif
927: #if defined(PETSC_HAVE_HDF5)
928: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
929: #endif
930: #if defined(PETSC_HAVE_MATLAB_ENGINE)
931: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);
932: #endif
933: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERGLVIS,&isglvis);
934: #if defined(PETSC_HAVE_ADIOS)
935: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERADIOS,&isadios);
936: #endif
937: #if defined(PETSC_HAVE_ADIOS2)
938: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERADIOS2,&isadios2);
939: #endif
940: if (iascii) {
941: VecView_MPI_ASCII(xin,viewer);
942: } else if (isbinary) {
943: VecView_MPI_Binary(xin,viewer);
944: } else if (isdraw) {
945: PetscViewerFormat format;
946: PetscViewerGetFormat(viewer,&format);
947: if (format == PETSC_VIEWER_DRAW_LG) {
948: VecView_MPI_Draw_LG(xin,viewer);
949: } else {
950: VecView_MPI_Draw(xin,viewer);
951: }
952: #if defined(PETSC_HAVE_MATHEMATICA)
953: } else if (ismathematica) {
954: PetscViewerMathematicaPutVector(viewer,xin);
955: #endif
956: #if defined(PETSC_HAVE_HDF5)
957: } else if (ishdf5) {
958: VecView_MPI_HDF5(xin,viewer);
959: #endif
960: #if defined(PETSC_HAVE_ADIOS)
961: } else if (isadios) {
962: VecView_MPI_ADIOS(xin,viewer);
963: #endif
964: #if defined(PETSC_HAVE_ADIOS2)
965: } else if (isadios2) {
966: VecView_MPI_ADIOS2(xin,viewer);
967: #endif
968: #if defined(PETSC_HAVE_MATLAB_ENGINE)
969: } else if (ismatlab) {
970: VecView_MPI_Matlab(xin,viewer);
971: #endif
972: } else if (isglvis) {
973: VecView_GLVis(xin,viewer);
974: }
975: return(0);
976: }
978: PetscErrorCode VecGetSize_MPI(Vec xin,PetscInt *N)
979: {
981: *N = xin->map->N;
982: return(0);
983: }
985: PetscErrorCode VecGetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],PetscScalar y[])
986: {
987: const PetscScalar *xx;
988: PetscInt i,tmp,start = xin->map->range[xin->stash.rank];
989: PetscErrorCode ierr;
992: VecGetArrayRead(xin,&xx);
993: for (i=0; i<ni; i++) {
994: if (xin->stash.ignorenegidx && ix[i] < 0) continue;
995: tmp = ix[i] - start;
996: #if defined(PETSC_USE_DEBUG)
997: if (tmp < 0 || tmp >= xin->map->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Can only get local values, trying %D",ix[i]);
998: #endif
999: y[i] = xx[tmp];
1000: }
1001: VecRestoreArrayRead(xin,&xx);
1002: return(0);
1003: }
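/*
   Illustrative usage sketch (not part of pdvec.c): VecGetValues_MPI can only return
   entries owned by the calling process, so indices are taken from the local ownership
   range.  The assumption here is that each rank owns at least two entries.
*/
#include <petscvec.h>
PetscErrorCode GetTwoLocalEntries(Vec x,PetscScalar y[2])
{
  PetscErrorCode ierr;
  PetscInt       rstart,rend,ix[2];

  PetscFunctionBegin;
  ierr = VecGetOwnershipRange(x,&rstart,&rend);CHKERRQ(ierr);
  ix[0] = rstart;        /* first locally owned global index */
  ix[1] = rstart + 1;    /* assumes rend - rstart >= 2 */
  ierr = VecGetValues(x,2,ix,y);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}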
1005: PetscErrorCode VecSetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar y[],InsertMode addv)
1006: {
1008: PetscMPIInt rank = xin->stash.rank;
1009: PetscInt *owners = xin->map->range,start = owners[rank];
1010: PetscInt end = owners[rank+1],i,row;
1011: PetscScalar *xx;
1014: #if defined(PETSC_USE_DEBUG)
1015: if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
1016: else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
1017: #endif
1018: VecGetArray(xin,&xx);
1019: xin->stash.insertmode = addv;
1021: if (addv == INSERT_VALUES) {
1022: for (i=0; i<ni; i++) {
1023: if (xin->stash.ignorenegidx && ix[i] < 0) continue;
1024: #if defined(PETSC_USE_DEBUG)
1025: if (ix[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
1026: #endif
1027: if ((row = ix[i]) >= start && row < end) {
1028: xx[row-start] = y[i];
1029: } else if (!xin->stash.donotstash) {
1030: #if defined(PETSC_USE_DEBUG)
1031: if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
1032: #endif
1033: VecStashValue_Private(&xin->stash,row,y[i]);
1034: }
1035: }
1036: } else {
1037: for (i=0; i<ni; i++) {
1038: if (xin->stash.ignorenegidx && ix[i] < 0) continue;
1039: #if defined(PETSC_USE_DEBUG)
1040: if (ix[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
1041: #endif
1042: if ((row = ix[i]) >= start && row < end) {
1043: xx[row-start] += y[i];
1044: } else if (!xin->stash.donotstash) {
1045: #if defined(PETSC_USE_DEBUG)
1046: if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
1047: #endif
1048: VecStashValue_Private(&xin->stash,row,y[i]);
1049: }
1050: }
1051: }
1052: VecRestoreArray(xin,&xx);
1053: return(0);
1054: }
1056: PetscErrorCode VecSetValuesBlocked_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar yin[],InsertMode addv)
1057: {
1058: PetscMPIInt rank = xin->stash.rank;
1059: PetscInt *owners = xin->map->range,start = owners[rank];
1061: PetscInt end = owners[rank+1],i,row,bs = PetscAbs(xin->map->bs),j;
1062: PetscScalar *xx,*y = (PetscScalar*)yin;
1065: VecGetArray(xin,&xx);
1066: #if defined(PETSC_USE_DEBUG)
1067: if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
1068: else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
1069: #endif
1070: xin->stash.insertmode = addv;
1072: if (addv == INSERT_VALUES) {
1073: for (i=0; i<ni; i++) {
1074: if ((row = bs*ix[i]) >= start && row < end) {
1075: for (j=0; j<bs; j++) xx[row-start+j] = y[j];
1076: } else if (!xin->stash.donotstash) {
1077: if (ix[i] < 0) { y += bs; continue; }
1078: #if defined(PETSC_USE_DEBUG)
1079: if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
1080: #endif
1081: VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
1082: }
1083: y += bs;
1084: }
1085: } else {
1086: for (i=0; i<ni; i++) {
1087: if ((row = bs*ix[i]) >= start && row < end) {
1088: for (j=0; j<bs; j++) xx[row-start+j] += y[j];
1089: } else if (!xin->stash.donotstash) {
1090: if (ix[i] < 0) { y += bs; continue; }
1091: #if defined(PETSC_USE_DEBUG)
1092: if (ix[i] > xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
1093: #endif
1094: VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
1095: }
1096: y += bs;
1097: }
1098: }
1099: VecRestoreArray(xin,&xx);
1100: return(0);
1101: }
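/*
   Illustrative usage sketch (not part of pdvec.c): inserting one block of values with
   VecSetValuesBlocked, which indexes by block (not by scalar entry) and stashes
   off-process blocks in the block stash handled above.  The block size of 3 and the
   block index 0 are assumptions made for this example.
*/
#include <petscvec.h>
PetscErrorCode InsertOneBlock(Vec x)
{
  PetscErrorCode ierr;
  PetscInt       block   = 0;                   /* global block index */
  PetscScalar    vals[3] = {1.0,2.0,3.0};

  PetscFunctionBegin;
  ierr = VecSetBlockSize(x,3);CHKERRQ(ierr);    /* block size must divide the local size */
  ierr = VecSetValuesBlocked(x,1,&block,vals,INSERT_VALUES);CHKERRQ(ierr);
  ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}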
1103: /*
1104: Since nsends or nreceives may be zero we add 1 in certain mallocs
1105: to make sure we never malloc an empty one.
1106: */
1107: PetscErrorCode VecAssemblyBegin_MPI(Vec xin)
1108: {
1110: PetscInt *owners = xin->map->range,*bowners,i,bs,nstash,reallocs;
1111: PetscMPIInt size;
1112: InsertMode addv;
1113: MPI_Comm comm;
1116: PetscObjectGetComm((PetscObject)xin,&comm);
1117: if (xin->stash.donotstash) return(0);
1119: MPIU_Allreduce((PetscEnum*)&xin->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);
1120: if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
1121: xin->stash.insertmode = addv; /* in case this processor had no cache */
1122: xin->bstash.insertmode = addv; /* Block stash implicitly tracks InsertMode of scalar stash */
1124: VecGetBlockSize(xin,&bs);
1125: MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
1126: if (!xin->bstash.bowners && xin->map->bs != -1) {
1127: PetscMalloc1(size+1,&bowners);
1128: for (i=0; i<size+1; i++) bowners[i] = owners[i]/bs;
1129: xin->bstash.bowners = bowners;
1130: } else bowners = xin->bstash.bowners;
1132: VecStashScatterBegin_Private(&xin->stash,owners);
1133: VecStashScatterBegin_Private(&xin->bstash,bowners);
1134: VecStashGetInfo_Private(&xin->stash,&nstash,&reallocs);
1135: PetscInfo2(xin,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
1136: VecStashGetInfo_Private(&xin->bstash,&nstash,&reallocs);
1137: PetscInfo2(xin,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
1138: return(0);
1139: }
1141: PetscErrorCode VecAssemblyEnd_MPI(Vec vec)
1142: {
1144: PetscInt base,i,j,*row,flg,bs;
1145: PetscMPIInt n;
1146: PetscScalar *val,*vv,*array,*xarray;
1149: if (!vec->stash.donotstash) {
1150: VecGetArray(vec,&xarray);
1151: VecGetBlockSize(vec,&bs);
1152: base = vec->map->range[vec->stash.rank];
1154: /* Process the stash */
1155: while (1) {
1156: VecStashScatterGetMesg_Private(&vec->stash,&n,&row,&val,&flg);
1157: if (!flg) break;
1158: if (vec->stash.insertmode == ADD_VALUES) {
1159: for (i=0; i<n; i++) xarray[row[i] - base] += val[i];
1160: } else if (vec->stash.insertmode == INSERT_VALUES) {
1161: for (i=0; i<n; i++) xarray[row[i] - base] = val[i];
1162: } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1163: }
1164: VecStashScatterEnd_Private(&vec->stash);
1166: /* now process the block-stash */
1167: while (1) {
1168: VecStashScatterGetMesg_Private(&vec->bstash,&n,&row,&val,&flg);
1169: if (!flg) break;
1170: for (i=0; i<n; i++) {
1171: array = xarray+row[i]*bs-base;
1172: vv = val+i*bs;
1173: if (vec->stash.insertmode == ADD_VALUES) {
1174: for (j=0; j<bs; j++) array[j] += vv[j];
1175: } else if (vec->stash.insertmode == INSERT_VALUES) {
1176: for (j=0; j<bs; j++) array[j] = vv[j];
1177: } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1178: }
1179: }
1180: VecStashScatterEnd_Private(&vec->bstash);
1181: VecRestoreArray(vec,&xarray);
1182: }
1183: vec->stash.insertmode = NOT_SET_VALUES;
1184: return(0);
1185: }
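/*
   Illustrative usage sketch (not part of pdvec.c): the set-values/assembly cycle that
   the stash machinery above supports.  Every rank adds into global index 0, which is
   owned by rank 0; off-process contributions are stashed by VecSetValues_MPI,
   communicated in VecAssemblyBegin_MPI, and applied in VecAssemblyEnd_MPI.  The global
   size of 100 is an assumption made for this example.
*/
#include <petscvec.h>
PetscErrorCode AddIntoFirstEntry(MPI_Comm comm)
{
  PetscErrorCode ierr;
  Vec            x;
  PetscInt       gidx = 0;
  PetscScalar    one  = 1.0;

  PetscFunctionBegin;
  ierr = VecCreate(comm,&x);CHKERRQ(ierr);
  ierr = VecSetSizes(x,PETSC_DECIDE,100);CHKERRQ(ierr);
  ierr = VecSetFromOptions(x);CHKERRQ(ierr);
  ierr = VecSet(x,0.0);CHKERRQ(ierr);
  ierr = VecSetValues(x,1,&gidx,&one,ADD_VALUES);CHKERRQ(ierr); /* mixing ADD and INSERT across ranks is an error */
  ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(x);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}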