Actual source code: ex6.c

static char help[] = "Tests various 3-dimensional DMDA routines.\n\n";

#include <petscdm.h>
#include <petscdmda.h>
#include <petscao.h>

int main(int argc,char **argv)
{
  PetscMPIInt      rank;
  PetscInt         M = 3,N = 5,P = 3,s = 1,w = 2,nloc,l,i,j,k,kk,m = PETSC_DECIDE,n = PETSC_DECIDE,p = PETSC_DECIDE;
  PetscInt         Xs,Xm,Ys,Ym,Zs,Zm,iloc,*iglobal;
  const PetscInt   *ltog;
  PetscInt         *lx        = NULL,*ly = NULL,*lz = NULL;
  PetscBool        test_order = PETSC_FALSE;
  DM               da;
  PetscViewer      viewer;
  Vec              local,global;
  PetscScalar      value;
  DMBoundaryType   bx           = DM_BOUNDARY_NONE,by = DM_BOUNDARY_NONE,bz = DM_BOUNDARY_NONE;
  DMDAStencilType  stencil_type = DMDA_STENCIL_BOX;
  AO               ao;
  PetscBool        flg = PETSC_FALSE;

  PetscInitialize(&argc,&argv,(char*)0,help);
  PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",300,0,400,300,&viewer);

  /* Read options */
  PetscOptionsGetInt(NULL,NULL,"-NX",&M,NULL);
  PetscOptionsGetInt(NULL,NULL,"-NY",&N,NULL);
  PetscOptionsGetInt(NULL,NULL,"-NZ",&P,NULL);
  PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);
  PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);
  PetscOptionsGetInt(NULL,NULL,"-p",&p,NULL);
  PetscOptionsGetInt(NULL,NULL,"-s",&s,NULL);
  PetscOptionsGetInt(NULL,NULL,"-w",&w,NULL);
  flg  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-star",&flg,NULL);
  if (flg) stencil_type = DMDA_STENCIL_STAR;
  flg  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-box",&flg,NULL);
  if (flg) stencil_type = DMDA_STENCIL_BOX;

  flg  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-xperiodic",&flg,NULL);
  if (flg) bx = DM_BOUNDARY_PERIODIC;
  flg  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-xghosted",&flg,NULL);
  if (flg) bx = DM_BOUNDARY_GHOSTED;
  flg  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-xnonghosted",&flg,NULL);

  flg  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-yperiodic",&flg,NULL);
  if (flg) by = DM_BOUNDARY_PERIODIC;
  flg  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-yghosted",&flg,NULL);
  if (flg) by = DM_BOUNDARY_GHOSTED;
  flg  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-ynonghosted",&flg,NULL);

  flg  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-zperiodic",&flg,NULL);
  if (flg) bz = DM_BOUNDARY_PERIODIC;
  flg  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-zghosted",&flg,NULL);
  if (flg) bz = DM_BOUNDARY_GHOSTED;
  flg  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-znonghosted",&flg,NULL);
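  /* The -[xyz]nonghosted options are parsed so they are recognized on the
     command line, but they leave the default DM_BOUNDARY_NONE unchanged. */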

  PetscOptionsGetBool(NULL,NULL,"-testorder",&test_order,NULL);

  flg  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-distribute",&flg,NULL);
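  /* With -distribute, supply explicit per-rank ownership counts (requires -m,
     -n, and -p to be given): each of the first m-1 ranks gets 4 grid points
     in x (2 in y and z) and the last rank takes the remainder, so the entries
     of lx, ly, and lz sum to M, N, and P respectively. */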
  if (flg) {
    if (m == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Must set -m option with -distribute option");
    PetscMalloc1(m,&lx);
    for (i=0; i<m-1; i++) lx[i] = 4;
    lx[m-1] = M - 4*(m-1);
    if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Must set -n option with -distribute option");
    PetscMalloc1(n,&ly);
    for (i=0; i<n-1; i++) ly[i] = 2;
    ly[n-1] = N - 2*(n-1);
    if (p == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Must set -p option with -distribute option");
    PetscMalloc1(p,&lz);
    for (i=0; i<p-1; i++) lz[i] = 2;
    lz[p-1] = P - 2*(p-1);
  }

  /* Create distributed array and get vectors */
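  /* Arguments to DMDACreate3d: boundary types (bx,by,bz), stencil shape,
     global grid sizes (M,N,P), process-grid sizes (m,n,p; PETSC_DECIDE lets
     PETSc choose), w degrees of freedom per node, stencil width s, and
     optional per-rank ownership arrays lx,ly,lz (NULL unless -distribute). */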
  DMDACreate3d(PETSC_COMM_WORLD,bx,by,bz,stencil_type,M,N,P,m,n,p,w,s,lx,ly,lz,&da);
  DMSetFromOptions(da);
  DMSetUp(da);
  PetscFree(lx);
  PetscFree(ly);
  PetscFree(lz);
  DMView(da,viewer);
  DMCreateGlobalVector(da,&global);
  DMCreateLocalVector(da,&local);

  /* Set global vector; send ghost points to local vectors */
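  /* The Begin/End split of the scatter lets independent local computation be
     overlapped with the ghost-point communication between the two calls. */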
  value = 1;
  VecSet(global,value);
  DMGlobalToLocalBegin(da,global,INSERT_VALUES,local);
  DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);

  /* Scale local vectors according to processor rank; pass to global vector */
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  value = rank;
  VecScale(local,value);
  DMLocalToGlobalBegin(da,local,INSERT_VALUES,global);
  DMLocalToGlobalEnd(da,local,INSERT_VALUES,global);

  if (!test_order) { /* turn off printing when testing ordering mappings */
    if (M*N*P<40) {
      PetscPrintf(PETSC_COMM_WORLD,"\nGlobal Vector:\n");
      VecView(global,PETSC_VIEWER_STDOUT_WORLD);
      PetscPrintf(PETSC_COMM_WORLD,"\n");
    }
  }

  /* Send ghost points to local vectors */
  DMGlobalToLocalBegin(da,global,INSERT_VALUES,local);
  DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);

  flg  = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-local_print",&flg,NULL);
  if (flg) {
    PetscViewer sviewer;
    PetscViewerASCIIPushSynchronized(PETSC_VIEWER_STDOUT_WORLD);
    PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\nLocal Vector: processor %d\n",rank);
    PetscViewerGetSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&sviewer);
    VecView(local,sviewer);
    PetscViewerRestoreSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&sviewer);
    PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);
    PetscViewerASCIIPopSynchronized(PETSC_VIEWER_STDOUT_WORLD);
  }

  /* Test mappings between application/PETSc orderings */
  if (test_order) {
    ISLocalToGlobalMapping ltogm;

    DMGetLocalToGlobalMapping(da,&ltogm);
    ISLocalToGlobalMappingGetSize(ltogm,&nloc);
    ISLocalToGlobalMappingGetIndices(ltogm,&ltog);

    DMDAGetGhostCorners(da,&Xs,&Ys,&Zs,&Xm,&Ym,&Zm);
    DMDAGetAO(da,&ao);
    /* AOView(ao,PETSC_VIEWER_STDOUT_WORLD); */
    PetscMalloc1(nloc,&iglobal);

    /* Set iglobal to be global indices for each processor's local and ghost nodes,
       using the DMDA ordering of grid points */
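    /* iloc below is the offset of grid node (i,j,k) in the local (ghosted)
       array: nodes are stored x-fastest over the Xm*Ym*Zm ghost region, with
       w consecutive slots per node, one for each degree of freedom. */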
    kk = 0;
    for (k=Zs; k<Zs+Zm; k++) {
      for (j=Ys; j<Ys+Ym; j++) {
        for (i=Xs; i<Xs+Xm; i++) {
          iloc = w*((k-Zs)*Xm*Ym + (j-Ys)*Xm + i-Xs);
          for (l=0; l<w; l++) {
            iglobal[kk++] = ltog[iloc+l];
          }
        }
      }
    }

    /* Map this to the application ordering (which for DMDAs is just the natural ordering
       that would be used for 1 processor, numbering most rapidly by x, then y, then z) */
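    /* For example, with w=1 the natural (application) index of grid node
       (i,j,k) is i + j*M + k*M*N. */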
    AOPetscToApplication(ao,nloc,iglobal);

    /* Then map the application ordering back to the PETSc DMDA ordering */
    AOApplicationToPetsc(ao,nloc,iglobal);

    /* Verify the mappings */
    kk = 0;
    for (k=Zs; k<Zs+Zm; k++) {
      for (j=Ys; j<Ys+Ym; j++) {
        for (i=Xs; i<Xs+Xm; i++) {
          iloc = w*((k-Zs)*Xm*Ym + (j-Ys)*Xm + i-Xs);
          for (l=0; l<w; l++) {
            if (iglobal[kk] != ltog[iloc+l]) {
              PetscPrintf(PETSC_COMM_WORLD,"[%d] Problem with mapping: k=%D, j=%D, i=%D, l=%D, petsc1=%D, petsc2=%D\n",rank,k,j,i,l,ltog[iloc+l],iglobal[kk]);
            }
            kk++;
          }
        }
      }
    }
    PetscFree(iglobal);
    ISLocalToGlobalMappingRestoreIndices(ltogm,&ltog);
  }

  /* Free memory */
  PetscViewerDestroy(&viewer);
  VecDestroy(&local);
  VecDestroy(&global);
  DMDestroy(&da);
  PetscFinalize();
  return 0;
}
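
/*
   Example invocation (hypothetical; the MPI launcher and binary name depend
   on the local PETSc build):

       mpiexec -n 2 ./ex6 -testorder -nox

   which exercises the ordering test driven by the TEST block below.
*/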

/*TEST

    test:
      args: -testorder -nox

 TEST*/