Actual source code: asils.c
#include "src/complementarity/impls/ssls/ssls.h"
// Context for ASILS
//   -- active-set      - reduced matrices formed
//                      - inherit properties of original system
//   -- semismooth (S)  - function not differentiable
//                      - merit function continuously differentiable
//                      - Fischer-Burmeister reformulation of complementarity
//                      - Billups composition for two finite bounds
//   -- infeasible (I)  - iterates not guaranteed to remain within bounds
//   -- feasible (F)    - iterates guaranteed to remain within bounds
//   -- linesearch (LS) - Armijo rule on direction
//
// Many other reformulations are possible, as are other combinations of
// feasible/infeasible iterates and linesearch/trust-region globalization.
//
// Basic theory
// The Fischer-Burmeister reformulation is semismooth with a continuously
// differentiable merit function, and strongly semismooth if F has
// Lipschitz continuous derivatives.
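//
// For reference, in the lower-bounded case the Fischer-Burmeister function is
//   phi(a, b) = sqrt(a^2 + b^2) - a - b,
// which is zero if and only if a >= 0, b >= 0, and a*b = 0, and the merit
// function minimized by the algorithm is psi(x) = 0.5*||Phi(x)||^2, where
// Phi(x) collects the componentwise reformulated complementarity conditions.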
//
// Every accumulation point generated by the algorithm is a stationary
// point for the merit function.  Stationary points of the merit function
// are solutions of the complementarity problem if
//   a. the stationary point has a BD-regular subdifferential, or
//   b. the Schur complement F'/F'_ff is a P_0-matrix, where ff is the
//      index set corresponding to the free variables.
//
// If one of the accumulation points has a BD-regular subdifferential then
//   a. the entire sequence converges to this accumulation point at
//      a local q-superlinear rate;
//   b. if, in addition, the reformulation is strongly semismooth near
//      this accumulation point, then the algorithm converges at a
//      local q-quadratic rate.
//
// The theory for the feasible version follows from the feasible descent
// algorithm framework.
//
// References:
//   Billups, "Algorithms for Complementarity Problems and Generalized
//     Equations," Ph.D. thesis, University of Wisconsin - Madison, 1995.
//   De Luca, Facchinei, Kanzow, "A Semismooth Equation Approach to the
//     Solution of Nonlinear Complementarity Problems," Mathematical
//     Programming, 75, pages 407-439, 1996.
//   Ferris, Kanzow, Munson, "Feasible Descent Algorithms for Mixed
//     Complementarity Problems," Mathematical Programming, 86,
//     pages 475-497, 1999.
//   Fischer, "A Special Newton-type Optimization Method," Optimization,
//     24, pages 269-284, 1992.
//   Munson, Facchinei, Ferris, Fischer, Kanzow, "The Semismooth Algorithm
//     for Large Scale Complementarity Problems," Technical Report 99-06,
//     University of Wisconsin - Madison, 1999.
/*------------------------------------------------------------*/

static int TaoSolve_ASILS(TAO_SOLVER tao, void *solver)
{
  TAO_SSLS *asls = (TAO_SSLS *)solver;
  TaoVec *x, *l, *u, *ff, *dpsi, *d, *w, *t1, *t2, *da, *db;
  TaoMat *J, *Jsub;
  TaoVec *dxfree, *r1, *r2, *r3;
  TaoIndexSet *FreeVariableSet, *FixedVariableSet;
  // TaoLinearSolver *lsolver;
  double psi, psi_full, ndpsi, normd, innerd, t=0;
  double delta, rho;
  int iter=0, info;
  TaoInt nn, nf;
  TaoTerminateReason reason;
  TaoTruth flag;

  TaoFunctionBegin;

  // Assume that Setup has been called!
  // Set the structure for the Jacobian and create a linear solver.

  delta = asls->delta;
  rho = asls->rho;

  info = TaoGetSolution(tao, &x); CHKERRQ(info);
  info = TaoGetVariableBounds(tao, &l, &u); CHKERRQ(info);
  info = TaoEvaluateVariableBounds(tao, l, u); CHKERRQ(info);
  info = TaoGetJacobian(tao, &J); CHKERRQ(info);

  info = x->GetDimension(&nn); CHKERRQ(info);

  info = x->CreateIndexSet(&FreeVariableSet); CHKERRQ(info);
  info = x->CreateIndexSet(&FixedVariableSet); CHKERRQ(info);
  info = J->CreateReducedMatrix(FreeVariableSet, FreeVariableSet, &Jsub); CHKERRQ(info);
  info = x->Clone(&dxfree); CHKERRQ(info);
  info = x->Clone(&r1); CHKERRQ(info);
  info = x->Clone(&r2); CHKERRQ(info);
  info = x->Clone(&r3); CHKERRQ(info);

  ff = asls->ff;
  dpsi = asls->dpsi;
  d = asls->d;
  w = asls->w;
  t1 = asls->t1;
  t2 = asls->t2;
  da = asls->da;
  db = asls->db;
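
  // ff holds the Fischer-based reformulation Phi(x), dpsi the gradient of the
  // merit function, and da, db the diagonal scalings used below to assemble
  // the (negated) semismooth Newton system (D_a + D_b J) d = Phi.  These work
  // vectors are assumed to be filled in by Tao_ASLS_FunctionGradient.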

  info = TaoSetMeritFunction(tao, Tao_SSLS_Function, Tao_ASLS_FunctionGradient,
                             TAO_NULL, TAO_NULL, asls); CHKERRQ(info);

  // Calculate the function value and Fischer function value at the
  // current iterate

  info = TaoComputeMeritFunctionGradient(tao, x, &psi, dpsi); CHKERRQ(info);
  info = dpsi->Norm2(&ndpsi); CHKERRQ(info);

  while (1) {
    // Check the termination criteria

    info = TaoMonitor(tao, iter++, asls->merit, ndpsi, 0.0, t, &reason); CHKERRQ(info);
    if (TAO_CONTINUE_ITERATING != reason) break;

    // We are going to solve a linear system of equations.  We need to
    // set the tolerances for the solve so that we maintain an asymptotic
    // rate of convergence that is superlinear.
    // Note: these tolerances are for the reduced system.  We really need
    // to make sure that the full system satisfies the full-space conditions.

    // This rule gives superlinear asymptotic convergence
    // asls->atol = TaoMin(0.5, asls->merit*sqrt(asls->merit));
    // asls->rtol = 0.0;

    // This rule gives quadratic asymptotic convergence
    // asls->atol = TaoMin(0.5, asls->merit*asls->merit);
    // asls->rtol = 0.0;

    // Calculate a free and fixed set of variables.  The fixed set consists
    // of those variables for which db is approximately equal to zero.
    // The definition of "approximately" changes as we approach the solution
    // to the problem.

    // No one rule is guaranteed to work in all cases.  The following
    // definition is based on the norm of the Jacobian matrix.  If the
    // norm is large, the tolerance becomes smaller.

    info = J->Norm1(&(asls->identifier)); CHKERRQ(info);
    // asls->identifier = asls->atol / (1 + asls->identifier);
    asls->identifier = TaoMin(asls->merit, 1e-2) / (1 + asls->identifier);
    // asls->identifier = 1e-4 / (1 + asls->identifier);
    // asls->identifier = 1e-4;
    // asls->identifier = 1e-8;

    info = t1->SetToConstant(-asls->identifier); CHKERRQ(info);
    info = t2->SetToConstant( asls->identifier); CHKERRQ(info);

    info = FixedVariableSet->WhichBetweenOrEqual(t1, db, t2); CHKERRQ(info);
    info = FreeVariableSet->ComplementOf(FixedVariableSet); CHKERRQ(info);
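
    // With these bounds, the fixed set is { i : |db_i| <= identifier } and
    // the free set is its complement.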

    info = FixedVariableSet->GetSize(&nf); CHKERRQ(info);
    info = PetscPrintf(((PetscObject)tao)->comm, "Fixed size: %d\n", nf); CHKERRQ(info);

    // We now have our partition.  Now calculate the direction in the
    // fixed variable space.

    info = r1->SetReducedVec(ff, FixedVariableSet); CHKERRQ(info);
    info = r2->SetReducedVec(da, FixedVariableSet); CHKERRQ(info);
    info = r1->PointwiseDivide(r1, r2); CHKERRQ(info);

    info = d->SetToZero(); CHKERRQ(info);
    info = d->ReducedXPY(r1, FixedVariableSet); CHKERRQ(info);
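
    // On the fixed set db is (approximately) zero, so each such row of the
    // system (D_a + D_b J) d = Phi reduces to da_i * d_i = Phi_i; the reduced
    // divide above therefore supplies the fixed components d_i = Phi_i / da_i.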

    // Our direction in the FixedVariableSet is fixed.  Calculate the
    // information needed for the step in the FreeVariableSet.  To
    // do this, we need to know the diagonal perturbation and the
    // right-hand side.

    info = r1->SetReducedVec(da, FreeVariableSet); CHKERRQ(info);
    info = r2->SetReducedVec(ff, FreeVariableSet); CHKERRQ(info);
    info = r3->SetReducedVec(db, FreeVariableSet); CHKERRQ(info);

    info = r1->PointwiseDivide(r1, r3); CHKERRQ(info);
    info = r2->PointwiseDivide(r2, r3); CHKERRQ(info);

    // r1 is the diagonal perturbation
    // r2 is the right-hand side
    // r3 is no longer needed

    // We now need to modify r2 for our direction choice in the fixed
    // variable set: calculate t1 = J*d, take the reduced vector
    // of t1, and modify r2.

    info = J->Multiply(d, t1); CHKERRQ(info);
    info = r3->SetReducedVec(t1, FreeVariableSet); CHKERRQ(info);
    info = r2->Axpy(-1.0, r3); CHKERRQ(info);
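
    // After dividing the free rows by db, the reduced system assembled below is
    //   (J_FF + diag(da_F ./ db_F)) dxfree = ff_F ./ db_F - (J * d_fixed)_F,
    // where the subscript F denotes restriction to the free variables.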

    // Calculate the reduced problem matrix and the direction.

    info = Jsub->SetReducedMatrix(J, FreeVariableSet, FreeVariableSet); CHKERRQ(info);
    info = Jsub->AddDiagonal(r1); CHKERRQ(info);
    info = dxfree->SetReducedVec(d, FreeVariableSet); CHKERRQ(info);
    info = dxfree->SetToZero(); CHKERRQ(info);

    // Set the convergence for the reduced problem solver.
    // info = TaoGetLinearSolver(tao, &lsolver); CHKERRQ(info);
    // info = lsolver->SetTolerances(0.1*asls->rtol, 0.1*asls->atol, 1e30, 10*nn); CHKERRQ(info);

    // Calculate the reduced direction.  (This is really the negative of the
    // Newton direction, so the rest of the code uses -d.)

    info = TaoPreLinearSolve(tao, Jsub); CHKERRQ(info);
    info = TaoLinearSolve(tao, Jsub, r2, dxfree, &flag); CHKERRQ(info);

    // Add the direction in the free variables back into the real direction.

    info = d->ReducedXPY(dxfree, FreeVariableSet); CHKERRQ(info);

    // Calculate the norm of the relative residual in the real space

    // info = t1->PointwiseMultiply(da, d); CHKERRQ(info);
    // info = J->Multiply(d, t2); CHKERRQ(info);
    // info = t2->PointwiseMultiply(db, t2); CHKERRQ(info);
    // info = t1->Axpy( 1.0, t2); CHKERRQ(info);
    // info = t1->Axpy(-1.0, ff); CHKERRQ(info);
    // info = t1->Norm2(&two_norm); CHKERRQ(info);

    // if (two_norm >= asls->atol) {
    //   printf("Bad absolute residual: actual: %5.4e needed: %5.4e\n",
    //          two_norm, asls->atol);
    // }

    // Check that the direction is a descent direction for the merit function;
    // if not, use the negative gradient direction instead.

    info = d->Norm2(&normd); CHKERRQ(info);
    info = d->Dot(dpsi, &innerd); CHKERRQ(info);
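
    // Since d solves (D_a + D_b J) d = Phi and is negated below, d is kept
    // only if dpsi'*d > delta*||d||^rho; otherwise the gradient dpsi is used
    // in its place (delta and rho are set in TaoCreate_ASILS).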

    if (innerd <= delta*pow(normd, rho)) {
      printf("Gradient direction: %5.4e.\n", innerd);
      info = PetscInfo1(tao, "TaoSolve_ASILS: %d: newton direction not descent\n", iter); CHKERRQ(info);

      info = d->CopyFrom(dpsi); CHKERRQ(info);
      info = d->Dot(dpsi, &innerd); CHKERRQ(info);
    }

    info = d->Negate(); CHKERRQ(info);
    innerd = -innerd;

    // We now have a correct descent direction.  Apply a linesearch to
    // find the new iterate.

    t = 1;
    info = TaoLineSearchApply(tao, x, dpsi, d, w,
                              &psi, &psi_full, &t, &tao->lsflag); CHKERRQ(info);
    info = dpsi->Norm2(&ndpsi); CHKERRQ(info);
  }

  info = TaoMatDestroy(Jsub); CHKERRQ(info);
  info = TaoVecDestroy(dxfree); CHKERRQ(info);
  info = TaoVecDestroy(r1); CHKERRQ(info);
  info = TaoVecDestroy(r2); CHKERRQ(info);
  info = TaoVecDestroy(r3); CHKERRQ(info);
  info = TaoIndexSetDestroy(FreeVariableSet); CHKERRQ(info);
  info = TaoIndexSetDestroy(FixedVariableSet); CHKERRQ(info);

  TaoFunctionReturn(0);
}

/* ---------------------------------------------------------- */

int TaoCreate_ASILS(TAO_SOLVER tao)
{
  TAO_SSLS *asls;
  int info;

  TaoFunctionBegin;

  info = TaoNew(TAO_SSLS, &asls); CHKERRQ(info);
  info = PetscLogObjectMemory(tao, sizeof(TAO_SSLS)); CHKERRQ(info);

  asls->delta = 1e-10;
  asls->rho = 2.1;

  asls->identifier = 1e-5;
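  // delta and rho are the parameters of the descent test in TaoSolve_ASILS;
  // identifier is the initial tolerance for splitting the variables into
  // fixed and free sets (it is recomputed at every iteration).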

  info = TaoSetTaoSolveRoutine(tao, TaoSolve_ASILS, (void *)asls); CHKERRQ(info);
  info = TaoSetTaoSetUpDownRoutines(tao, TaoSetUp_SSLS, TaoSetDown_SSLS); CHKERRQ(info);
  info = TaoSetTaoOptionsRoutine(tao, TaoSetOptions_SSLS); CHKERRQ(info);
  info = TaoSetTaoViewRoutine(tao, TaoView_SSLS); CHKERRQ(info);

  info = TaoCreateArmijoLineSearch(tao); CHKERRQ(info);

  info = TaoSetMaximumIterates(tao, 2000); CHKERRQ(info);
  info = TaoSetMaximumFunctionEvaluations(tao, 4000); CHKERRQ(info);

  info = TaoSetTolerances(tao, 0, 0, 0, 0); CHKERRQ(info);
  info = TaoSetGradientTolerances(tao, 1.0e-16, 0.0, 0.0); CHKERRQ(info);
  info = TaoSetFunctionLowerBound(tao, 1.0e-8); CHKERRQ(info);

  TaoFunctionReturn(0);
}