Actual source code: ex5opt_ic.c
static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n";

/*
  See ex5.c for details on the equation.
  This code demonstrates the TSAdjoint/TAO interface to solve an inverse initial value problem built on a system of
  time-dependent partial differential equations.
  In this problem, the initial value for the PDE is unknown, but the output (the final solution of the PDE) is known.
  We want to determine the initial value that can produce the given output.
  We formulate the problem as a nonlinear optimization problem that minimizes the discrepancy between the simulated
  result and the given reference solution, calculate the gradient of the objective function with the discrete adjoint
  solver, and solve the optimization problem with TAO.

  Runtime options:
    -forwardonly  - run only the forward simulation
    -implicitform - provide IFunction and IJacobian to TS; if not set, RHSFunction and RHSJacobian will be used
*/
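/*
  In outline, the optimization problem solved below is

      min_{U0}  f(U0) = || U(T; U0) - U_obs ||^2,

  where U(T; U0) is the final state of the forward TS solve started from U0 and U_obs is the
  reference solution stored in the binary file ex5opt.ob by GenerateOBs(). FormFunctionAndGradient()
  evaluates f from the difference vector with VecDot(), obtains the gradient by setting the terminal
  adjoint lambda(T) = 2*(U(T) - U_obs) (see InitializeLambda()) and running TSAdjointSolve(), which
  leaves lambda(0) = df/dU0 for TAO to use.
*/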
#include "reaction_diffusion.h"
#include <petscdm.h>
#include <petscdmda.h>
#include <petsctao.h>

/* User-defined routines */
extern PetscErrorCode FormFunctionAndGradient(Tao,Vec,PetscReal*,Vec,void*);
/*
   Set terminal condition for the adjoint variable
*/
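/*
   The objective is f = ||U(T) - U_obs||^2, so its derivative with respect to the final state is
   2*(U(T) - U_obs); this routine adds that quantity to lambda, which the caller has zeroed
   beforehand. U_obs is read back from the binary file ex5opt.ob written by GenerateOBs().
*/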
PetscErrorCode InitializeLambda(DM da,Vec lambda,Vec U,AppCtx *appctx)
{
  char        filename[PETSC_MAX_PATH_LEN] = "";
  PetscViewer viewer;
  Vec         Uob;

  VecDuplicate(U,&Uob);
  PetscSNPrintf(filename,sizeof filename,"ex5opt.ob");
  PetscViewerBinaryOpen(PETSC_COMM_WORLD,filename,FILE_MODE_READ,&viewer);
  VecLoad(Uob,viewer);
  PetscViewerDestroy(&viewer);
  VecAYPX(Uob,-1.,U);
  VecScale(Uob,2.0);
  VecAXPY(lambda,1.,Uob);
  VecDestroy(&Uob);
  return(0);
}
/*
   Set up a viewer that dumps data into a binary file
*/
PetscErrorCode OutputBIN(DM da, const char *filename, PetscViewer *viewer)
{
  PetscViewerCreate(PetscObjectComm((PetscObject)da),viewer);
  PetscViewerSetType(*viewer,PETSCVIEWERBINARY);
  PetscViewerFileSetMode(*viewer,FILE_MODE_WRITE);
  PetscViewerFileSetName(*viewer,filename);
  return(0);
}
/*
   Generate a reference solution and save it to a binary file
*/
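/*
   Note: main() calls this with the unperturbed initial conditions, so the file ex5opt.ob holds the
   "observed" final state U_obs that FormFunctionAndGradient() and InitializeLambda() later read back
   to form the misfit and the terminal adjoint.
*/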
PetscErrorCode GenerateOBs(TS ts,Vec U,AppCtx *appctx)
{
  char        filename[PETSC_MAX_PATH_LEN] = "";
  PetscViewer viewer;
  DM          da;

  TSGetDM(ts,&da);
  TSSolve(ts,U);
  PetscSNPrintf(filename,sizeof filename,"ex5opt.ob");
  OutputBIN(da,filename,&viewer);
  VecView(U,viewer);
  PetscViewerDestroy(&viewer);
  return(0);
}
PetscErrorCode InitialConditions(DM da,Vec U)
{
  PetscInt  i,j,xs,ys,xm,ym,Mx,My;
  Field     **u;
  PetscReal hx,hy,x,y;

  DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  hx = 2.5/(PetscReal)Mx;
  hy = 2.5/(PetscReal)My;

  DMDAVecGetArray(da,U,&u);
  /* Get local grid boundaries */
  DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);

  /* Compute function over the locally owned part of the grid */
  for (j=ys; j<ys+ym; j++) {
    y = j*hy;
    for (i=xs; i<xs+xm; i++) {
      x = i*hx;
      if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = .25*PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0);
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0*u[j][i].v;
    }
  }

  DMDAVecRestoreArray(da,U,&u);
  return(0);
}
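/*
   The three PerturbedInitialConditions variants below produce different initial guesses for the
   optimization: main() applies the one selected by -perturbic to appctx.U, and that vector is then
   copied into the TAO solution vector P. They differ from InitialConditions() only in the argument
   of the sine bump or in the region where the bump is nonzero.
*/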
PetscErrorCode PerturbedInitialConditions(DM da,Vec U)
{
  PetscInt  i,j,xs,ys,xm,ym,Mx,My;
  Field     **u;
  PetscReal hx,hy,x,y;

  DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  hx = 2.5/(PetscReal)Mx;
  hy = 2.5/(PetscReal)My;

  DMDAVecGetArray(da,U,&u);
  /* Get local grid boundaries */
  DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);

  /* Compute function over the locally owned part of the grid */
  for (j=ys; j<ys+ym; j++) {
    y = j*hy;
    for (i=xs; i<xs+xm; i++) {
      x = i*hx;
      if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = .25*PetscPowReal(PetscSinReal(4.0*PETSC_PI*0.5*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*0.5*y),2.0);
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0*u[j][i].v;
    }
  }

  DMDAVecRestoreArray(da,U,&u);
  return(0);
}
PetscErrorCode PerturbedInitialConditions2(DM da,Vec U)
{
  PetscInt  i,j,xs,ys,xm,ym,Mx,My;
  Field     **u;
  PetscReal hx,hy,x,y;

  DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  hx = 2.5/(PetscReal)Mx;
  hy = 2.5/(PetscReal)My;

  DMDAVecGetArray(da,U,&u);
  /* Get local grid boundaries */
  DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);

  /* Compute function over the locally owned part of the grid */
  for (j=ys; j<ys+ym; j++) {
    y = j*hy;
    for (i=xs; i<xs+xm; i++) {
      x = i*hx;
      if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = PetscPowReal(PetscSinReal(4.0*PETSC_PI*(x-0.5)),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0)/4.0;
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0*u[j][i].v;
    }
  }

  DMDAVecRestoreArray(da,U,&u);
  return(0);
}
PetscErrorCode PerturbedInitialConditions3(DM da,Vec U)
{
  PetscInt  i,j,xs,ys,xm,ym,Mx,My;
  Field     **u;
  PetscReal hx,hy,x,y;

  DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  hx = 2.5/(PetscReal)Mx;
  hy = 2.5/(PetscReal)My;

  DMDAVecGetArray(da,U,&u);
  /* Get local grid boundaries */
  DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);

  /* Compute function over the locally owned part of the grid */
  for (j=ys; j<ys+ym; j++) {
    y = j*hy;
    for (i=xs; i<xs+xm; i++) {
      x = i*hx;
      if ((0.5 <= x) && (x <= 2.0) && (0.5 <= y) && (y <= 2.0)) u[j][i].v = .25*PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0);
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0*u[j][i].v;
    }
  }

  DMDAVecRestoreArray(da,U,&u);
  return(0);
}
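/*
   Example command lines (the executable name ex5opt_ic is assumed, matching this file name):

     ./ex5opt_ic -forwardonly                 (reference forward run only, no optimization)
     ./ex5opt_ic -implicitform -perturbic 2   (optimize using IFunction/IJacobian and the second perturbed IC)
     ./ex5opt_ic -ts_max_steps 5 -ts_type rk -ts_rk_type 3 -ts_trajectory_type memory -tao_monitor -tao_view -tao_gatol 1e-6

   The last command corresponds to the regression test listed at the end of this file.
*/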
int main(int argc,char **argv)
{
  DM             da;
  AppCtx         appctx;
  PetscBool      forwardonly = PETSC_FALSE,implicitform = PETSC_FALSE;
  PetscInt       perturbic = 1;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  PetscOptionsGetBool(NULL,NULL,"-forwardonly",&forwardonly,NULL);
  PetscOptionsGetBool(NULL,NULL,"-implicitform",&implicitform,NULL);
  PetscOptionsGetInt(NULL,NULL,"-perturbic",&perturbic,NULL);

  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;
  appctx.aijpc = PETSC_FALSE;

  /* Create distributed array (DMDA) to manage parallel grid and vectors */
  DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,DM_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,64,64,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da);
  DMSetFromOptions(da);
  DMSetUp(da);
  DMDASetFieldName(da,0,"u");
  DMDASetFieldName(da,1,"v");

  /* Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types */
  DMCreateGlobalVector(da,&appctx.U);

  /* Create timestepping solver context */
  TSCreate(PETSC_COMM_WORLD,&appctx.ts);
  TSSetType(appctx.ts,TSCN);
  TSSetDM(appctx.ts,da);
  TSSetProblemType(appctx.ts,TS_NONLINEAR);
  TSSetEquationType(appctx.ts,TS_EQ_ODE_EXPLICIT); /* fewer Jacobian evaluations when adjoint BEuler is used, otherwise no effect */
  if (!implicitform) {
    TSSetRHSFunction(appctx.ts,NULL,RHSFunction,&appctx);
    TSSetRHSJacobian(appctx.ts,NULL,NULL,RHSJacobian,&appctx);
  } else {
    TSSetIFunction(appctx.ts,NULL,IFunction,&appctx);
    TSSetIJacobian(appctx.ts,NULL,NULL,IJacobian,&appctx);
  }

  /* Set initial conditions */
  InitialConditions(da,appctx.U);
  TSSetSolution(appctx.ts,appctx.U);

  /* Set solver options */
  TSSetMaxTime(appctx.ts,2000.0);
  TSSetTimeStep(appctx.ts,0.5);
  TSSetExactFinalTime(appctx.ts,TS_EXACTFINALTIME_MATCHSTEP);
  TSSetFromOptions(appctx.ts);
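  /* Generate the reference ("observed") solution: a forward solve from the true initial conditions, saved to ex5opt.ob */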
  GenerateOBs(appctx.ts,appctx.U,&appctx);
  if (!forwardonly) {
    Tao tao;
    Vec P;
    Vec lambda[1];
#if defined(PETSC_USE_LOG)
    PetscLogStage opt_stage;
#endif

    PetscLogStageRegister("Optimization",&opt_stage);
    PetscLogStagePush(opt_stage);
    if (perturbic == 1) {
      PerturbedInitialConditions(da,appctx.U);
    } else if (perturbic == 2) {
      PerturbedInitialConditions2(da,appctx.U);
    } else if (perturbic == 3) {
      PerturbedInitialConditions3(da,appctx.U);
    }

    VecDuplicate(appctx.U,&lambda[0]);
    TSSetCostGradients(appctx.ts,1,lambda,NULL);

    /* Have the TS save its trajectory needed by TSAdjointSolve() */
    TSSetSaveTrajectory(appctx.ts);

    /* Create TAO solver and set desired solution method */
    TaoCreate(PETSC_COMM_WORLD,&tao);
    TaoSetType(tao,TAOBLMVM);

    /* Set initial guess for TAO */
    VecDuplicate(appctx.U,&P);
    VecCopy(appctx.U,P);
    TaoSetInitialVector(tao,P);

    /* Set routine for function and gradient evaluation */
    TaoSetObjectiveAndGradientRoutine(tao,FormFunctionAndGradient,&appctx);

    /* Check for any TAO command line options */
    TaoSetFromOptions(tao);

    TaoSolve(tao);
    TaoDestroy(&tao);
    VecDestroy(&lambda[0]);
    VecDestroy(&P);
    PetscLogStagePop();
  }
  /* Free work space. All PETSc objects should be destroyed when they
     are no longer needed. */
  VecDestroy(&appctx.U);
  TSDestroy(&appctx.ts);
  DMDestroy(&da);
  ierr = PetscFinalize();
  return ierr;
}
/* ------------------------ TAO callbacks ---------------------------- */

/*
   FormFunctionAndGradient - Evaluates the function and corresponding gradient.

   Input Parameters:
   tao - the Tao context
   P   - the input vector
   ctx - optional user-defined context, as set by TaoSetObjectiveAndGradientRoutine()

   Output Parameters:
   f - the newly evaluated function
   G - the newly evaluated gradient
*/
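/*
   Each TAO evaluation re-runs the forward model from the candidate initial condition P: the TS time
   and step counter are reset (and the step size is made positive if needed), P is copied into
   appctx->U, and TSSolve() advances to the final time. The objective is the squared norm of
   U(T) - U_obs, with U_obs reloaded from ex5opt.ob; the gradient is obtained by zeroing lambda[0],
   setting the terminal adjoint with InitializeLambda(), running TSAdjointSolve(), and copying
   lambda[0] into G.
*/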
PetscErrorCode FormFunctionAndGradient(Tao tao,Vec P,PetscReal *f,Vec G,void *ctx)
{
  AppCtx      *appctx = (AppCtx*)ctx;
  PetscReal   soberr,timestep;
  Vec         *lambda;
  Vec         SDiff;
  DM          da;
  char        filename[PETSC_MAX_PATH_LEN] = "";
  PetscViewer viewer;

  TSSetTime(appctx->ts,0.0);
  TSGetTimeStep(appctx->ts,&timestep);
  if (timestep<0) {
    TSSetTimeStep(appctx->ts,-timestep);
  }
  TSSetStepNumber(appctx->ts,0);
  TSSetFromOptions(appctx->ts);

  VecDuplicate(P,&SDiff);
  VecCopy(P,appctx->U);
  TSGetDM(appctx->ts,&da);
  *f = 0;

  TSSolve(appctx->ts,appctx->U);
  PetscSNPrintf(filename,sizeof filename,"ex5opt.ob");
  PetscViewerBinaryOpen(PETSC_COMM_WORLD,filename,FILE_MODE_READ,&viewer);
  VecLoad(SDiff,viewer);
  PetscViewerDestroy(&viewer);
  VecAYPX(SDiff,-1.,appctx->U);
  VecDot(SDiff,SDiff,&soberr);
  *f += soberr;

  TSGetCostGradients(appctx->ts,NULL,&lambda,NULL);
  VecSet(lambda[0],0.0);
  InitializeLambda(da,lambda[0],appctx->U,appctx);
  TSAdjointSolve(appctx->ts);

  VecCopy(lambda[0],G);

  VecDestroy(&SDiff);
  return(0);
}
/*TEST

   build:
      depends: reaction_diffusion.c
      requires: !complex !single

   test:
      args: -ts_max_steps 5 -ts_type rk -ts_rk_type 3 -ts_trajectory_type memory -tao_monitor -tao_view -tao_gatol 1e-6
      output_file: output/ex5opt_ic_1.out

TEST*/