Actual source code: ex4.c


static char help[] = "Chemo-taxis Problems from Mathematical Biology.\n";

/*
     Page 18, Chemo-taxis Problems from Mathematical Biology

     The coupled system, written in the form in which it is discretized in IFunction() below, is

        rho_t = epsilon rho_xx - kappa (rho c_x)_x + mu |rho| (1 - rho) max(0, c - cstar) - beta rho
        c_t   = delta c_xx - lambda c - alpha rho c / (gamma + c)

     Further discussion on Page 134 and in 2d on Page 409
*/

/*

   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscts.h" so that we can use TS timestepping solvers.  Note that this
   file automatically includes:
     petscsys.h    - base PETSc routines      petscvec.h    - vectors
     petscmat.h    - matrices                 petscis.h     - index sets
     petscksp.h    - Krylov subspace methods  petscviewer.h - viewers
     petscpc.h     - preconditioners          petscsnes.h   - nonlinear solvers
*/

#include <petscdm.h>
#include <petscdmda.h>
#include <petscts.h>

typedef struct {
  PetscScalar rho,c;
} Field;

typedef struct {
  PetscScalar epsilon,delta,alpha,beta,gamma,kappa,lambda,mu,cstar;
  PetscBool   upwind;
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode IFunction(TS,PetscReal,Vec,Vec,Vec,void*),InitialConditions(DM,Vec);

int main(int argc,char **argv)
{
  TS             ts;                  /* timestepping solver context */
  Vec            U;                   /* solution vector */
  DM             da;
  AppCtx         appctx;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscInitialize(&argc,&argv,(char*)0,help);

  appctx.epsilon = 1.0e-3;
  appctx.delta   = 1.0;
  appctx.alpha   = 10.0;
  appctx.beta    = 4.0;
  appctx.gamma   = 1.0;
  appctx.kappa   = .75;
  appctx.lambda  = 1.0;
  appctx.mu      = 100.;
  appctx.cstar   = .2;
  appctx.upwind  = PETSC_TRUE;

  PetscOptionsGetScalar(NULL,NULL,"-delta",&appctx.delta,NULL);
  PetscOptionsGetBool(NULL,NULL,"-upwind",&appctx.upwind,NULL);
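
  /*
     The options registered above can be set on the command line, for example
     (a sample invocation only; adjust the executable name and MPI launcher to your build):

         mpiexec -n 2 ./ex4 -delta 0.5 -upwind false -da_refine 2 -ts_monitor
  */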

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,8,2,1,NULL,&da);
  DMSetFromOptions(da);
  DMSetUp(da);
  DMDASetFieldName(da,0,"rho");
  DMDASetFieldName(da,1,"c");
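
  /*
     The DMDA created above has 8 grid points, 2 degrees of freedom per point (rho and c),
     and stencil width 1.  Because DMSetFromOptions() is called, the grid can be refined at
     run time, e.g. with -da_refine as used in the tests at the end of this file.
  */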

  /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract a global vector from the DMDA to hold the solution
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMCreateGlobalVector(da,&U);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSCreate(PETSC_COMM_WORLD,&ts);
  TSSetType(ts,TSROSW);
  TSSetDM(ts,da);
  TSSetProblemType(ts,TS_NONLINEAR);
  TSSetIFunction(ts,NULL,IFunction,&appctx);
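
  /*
     TSROSW selects a Rosenbrock-W method, and the problem is posed in implicit form
     F(t,U,U_t) = 0 via TSSetIFunction().  Since no IJacobian routine is supplied,
     PETSc will typically approximate the Jacobian by finite differences (using
     coloring on the DMDA) unless a different Jacobian option is chosen at run time.
  */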

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  InitialConditions(da,U);
  TSSetSolution(ts,U);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solver options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSSetTimeStep(ts,.0001);
  TSSetMaxTime(ts,1.0);
  TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
  TSSetFromOptions(ts);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the time-dependent system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSSolve(ts,U);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  VecDestroy(&U);
  TSDestroy(&ts);
  DMDestroy(&da);

  PetscFinalize();
  return 0;
}
/* ------------------------------------------------------------------- */
/*
   IFunction - Evaluates the implicit residual F(t,U,U_t).

   Input Parameters:
.  ts - the TS context
.  ftime - current time
.  U - input vector
.  Udot - time derivative of the input vector
.  ptr - optional user-defined context, as set by TSSetIFunction()

   Output Parameter:
.  F - function vector
 */
PetscErrorCode IFunction(TS ts,PetscReal ftime,Vec U,Vec Udot,Vec F,void *ptr)
{
  AppCtx         *appctx = (AppCtx*)ptr;
  DM             da;
  PetscInt       i,Mx,xs,xm;
  PetscReal      hx,sx;
  PetscScalar    rho,c,rhoxx,cxx,cx,rhox,kcxrhox;
  Field          *u,*f,*udot;
  Vec            localU;

  TSGetDM(ts,&da);
  DMGetLocalVector(da,&localU);
  DMDAGetInfo(da,PETSC_IGNORE,&Mx,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  hx = 1.0/(PetscReal)(Mx-1); sx = 1.0/(hx*hx);

  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU);
  DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU);
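
  /*
     A sketch only (this example overlaps nothing): work that needs no ghost values
     could be placed between the two calls, e.g.

        DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU);
        ... computation that touches only locally owned, non-ghost data ...
        DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU);
  */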

  /*
     Get pointers to vector data
  */
  DMDAVecGetArrayRead(da,localU,&u);
  DMDAVecGetArrayRead(da,Udot,&udot);
  DMDAVecGetArrayWrite(da,F,&f);

  /*
     Get local grid boundaries
  */
  DMDAGetCorners(da,&xs,NULL,NULL,&xm,NULL,NULL);

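  /*
     Boundary points: the residual there is simply du/dt, so the boundary values keep
     their initial values; the commented-out expressions show an algebraic (Dirichlet)
     alternative.
  */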
  if (!xs) {
    f[0].rho = udot[0].rho; /* u[0].rho - 0.0; */
    f[0].c   = udot[0].c; /* u[0].c   - 1.0; */
    xs++;
    xm--;
  }
  if (xs+xm == Mx) {
    f[Mx-1].rho = udot[Mx-1].rho; /* u[Mx-1].rho - 1.0; */
    f[Mx-1].c   = udot[Mx-1].c;  /* u[Mx-1].c   - 0.0;  */
    xm--;
  }

  /*
     Compute function over the locally owned part of the grid
  */
  for (i=xs; i<xs+xm; i++) {
    rho   = u[i].rho;
    rhoxx = (-2.0*rho + u[i-1].rho + u[i+1].rho)*sx;
    c     = u[i].c;
    cxx   = (-2.0*c + u[i-1].c + u[i+1].c)*sx;

    if (!appctx->upwind) {
      rhox    = .5*(u[i+1].rho - u[i-1].rho)/hx;
      cx      = .5*(u[i+1].c - u[i-1].c)/hx;
      kcxrhox = appctx->kappa*(cxx*rho + cx*rhox);
    } else {
      kcxrhox = appctx->kappa*((u[i+1].c - u[i].c)*u[i+1].rho - (u[i].c - u[i-1].c)*u[i].rho)*sx;
    }
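
    /*
       Both branches approximate the chemotactic transport term kappa*(rho*c_x)_x:
       the centered branch expands it as kappa*(rho*c_xx + rho_x*c_x), while the
       upwind branch uses the one-sided flux form
       kappa*((c_{i+1}-c_i)*rho_{i+1} - (c_i-c_{i-1})*rho_i)/h^2.
    */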

    f[i].rho = udot[i].rho - appctx->epsilon*rhoxx + kcxrhox - appctx->mu*PetscAbsScalar(rho)*(1.0 - rho)*PetscMax(0,PetscRealPart(c - appctx->cstar)) + appctx->beta*rho;
    f[i].c   = udot[i].c - appctx->delta*cxx + appctx->lambda*c + appctx->alpha*rho*c/(appctx->gamma + c);
  }

  /*
     Restore vectors
  */
  DMDAVecRestoreArrayRead(da,localU,&u);
  DMDAVecRestoreArrayRead(da,Udot,&udot);
  DMDAVecRestoreArrayWrite(da,F,&f);
  DMRestoreLocalVector(da,&localU);
  return 0;
}

/* ------------------------------------------------------------------- */
PetscErrorCode InitialConditions(DM da,Vec U)
{
  PetscInt       i,xs,xm,Mx;
  Field          *u;
  PetscReal      hx,x;

  DMDAGetInfo(da,PETSC_IGNORE,&Mx,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  hx = 1.0/(PetscReal)(Mx-1);

  /*
     Get pointers to vector data
  */
  DMDAVecGetArrayWrite(da,U,&u);

  /*
     Get local grid boundaries
  */
  DMDAGetCorners(da,&xs,NULL,NULL,&xm,NULL,NULL);

  /*
     Compute initial conditions over the locally owned part of the grid
  */
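  /*
     The resulting state: rho is 0 everywhere except at the right-most grid point,
     where it is 1, and c(x) = cos(pi*x/2) decreases from 1 at x = 0 to 0 at x = 1.
  */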
  for (i=xs; i<xs+xm; i++) {
    x = i*hx;
    if (i < Mx-1) u[i].rho = 0.0;
    else          u[i].rho = 1.0;
    u[i].c = PetscCosReal(.5*PETSC_PI*x);
  }

  /*
     Restore vectors
  */
  DMDAVecRestoreArrayWrite(da,U,&u);
  return 0;
}

/*TEST

   test:
      args: -pc_type lu -da_refine 2 -ts_view -ts_monitor -ts_max_time 1
      requires: double

   test:
      suffix: 2
      args: -pc_type lu -da_refine 2 -ts_view -ts_monitor_draw_solution -ts_monitor -ts_max_time 1
      requires: x double
      output_file: output/ex4_1.out

TEST*/