conspire/math/optimize/newton_raphson/mod.rs

#[cfg(test)]
mod test;

use super::{
    super::{
        Banded, Hessian, Jacobian, Matrix, Solution, SquareMatrix, Tensor, TensorRank0, TensorVec,
        Vector,
    },
    EqualityConstraint, FirstOrderRootFinding, OptimizeError, SecondOrderOptimization,
};
use crate::ABS_TOL;
use std::ops::{Div, Mul};

#[derive(Debug)]
/// The Newton-Raphson method.
pub struct NewtonRaphson {
    /// Absolute error tolerance.
    pub abs_tol: TensorRank0,
    /// Whether to check that the solution is a minimum.
    pub check_minimum: bool,
    /// Maximum number of steps.
    pub max_steps: usize,
}

impl Default for NewtonRaphson {
    fn default() -> Self {
        Self {
            abs_tol: ABS_TOL,
            check_minimum: true,
            max_steps: 250,
        }
    }
}

impl<F, J, X> FirstOrderRootFinding<F, J, X> for NewtonRaphson
where
    F: Jacobian + Div<J, Output = X>,
    J: Hessian,
    X: Solution,
    Vector: From<X>,
    for<'a> &'a Matrix: Mul<&'a X, Output = Vector>,
{
    fn root(
        &self,
        function: impl Fn(&X) -> Result<F, OptimizeError>,
        jacobian: impl Fn(&X) -> Result<J, OptimizeError>,
        initial_guess: X,
        equality_constraint: EqualityConstraint,
    ) -> Result<X, OptimizeError> {
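        // Newton's method for root finding: iterate x ← x − J(x)⁻¹ f(x)
        // until ‖f(x)‖ < abs_tol, where J = ∂f/∂x is the Jacobian.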
        match equality_constraint {
            EqualityConstraint::Linear(constraint_matrix, constraint_rhs) => {
                let num_variables = initial_guess.num_entries();
                let num_constraints = constraint_rhs.len();
                let num_total = num_variables + num_constraints;
                let mut multipliers = Vector::ones(num_constraints) * self.abs_tol;
                let mut residual = Vector::zero(num_total);
                let mut solution = initial_guess;
                let mut tangent = SquareMatrix::zero(num_total);
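                // With linear constraints Ax = b, each iteration solves the KKT
                // system: residual [f(x) − Aᵀλ, b − Ax] against the tangent
                // [[J, −Aᵀ], [−A, 0]], updating x and the multipliers λ together.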
                for _ in 0..self.max_steps {
                    (function(&solution)? - &multipliers * &constraint_matrix).fill_into_chained(
                        &constraint_rhs - &constraint_matrix * &solution,
                        &mut residual,
                    );
                    jacobian(&solution)?.fill_into(&mut tangent);
                    constraint_matrix
                        .iter()
                        .enumerate()
                        .for_each(|(i, constraint_matrix_i)| {
                            constraint_matrix_i.iter().enumerate().for_each(
                                |(j, constraint_matrix_ij)| {
                                    tangent[i + num_variables][j] = -constraint_matrix_ij;
                                    tangent[j][i + num_variables] = -constraint_matrix_ij;
                                },
                            )
                        });
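                    // Zero the lower-right (multipliers × multipliers) block of
                    // the KKT matrix.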
                    tangent
                        .iter_mut()
                        .skip(num_variables)
                        .for_each(|tangent_i| {
                            tangent_i
                                .iter_mut()
                                .skip(num_variables)
                                .for_each(|tangent_ij| *tangent_ij = 0.0)
                        });
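                    // Converged once the full KKT residual is small; otherwise
                    // take a Newton step on (x, λ) via an LU solve.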
                    if residual.norm() < self.abs_tol {
                        return Ok(solution);
                    } else {
                        solution
                            .decrement_from_chained(&mut multipliers, tangent.solve_lu(&residual)?)
                    }
                }
            }
            EqualityConstraint::None => {
                let mut residual;
                let mut solution = initial_guess;
                let mut tangent;
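                // Unconstrained: plain Newton iteration. `residual / tangent`
                // computes J(x)⁻¹ f(x) through the Div impl on the tensor types.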
                for _ in 0..self.max_steps {
                    residual = function(&solution)?;
                    tangent = jacobian(&solution)?;
                    if residual.norm() < self.abs_tol {
                        return Ok(solution);
                    } else {
                        solution -= residual / tangent;
                    }
                }
            }
        }
        Err(OptimizeError::MaximumStepsReached(
            self.max_steps,
            format!("{:?}", &self),
        ))
    }
}

impl<F, H, J, X> SecondOrderOptimization<F, H, J, X> for NewtonRaphson
where
    H: Hessian,
    J: Jacobian + Div<H, Output = X>,
    X: Solution,
    Vector: From<X>,
    for<'a> &'a Matrix: Mul<&'a X, Output = Vector>,
{
    fn minimize(
        &self,
        _function: impl Fn(&X) -> Result<F, OptimizeError>,
        jacobian: impl Fn(&X) -> Result<J, OptimizeError>,
        hessian: impl Fn(&X) -> Result<H, OptimizeError>,
        initial_guess: X,
        equality_constraint: EqualityConstraint,
        banded: Option<Banded>,
    ) -> Result<X, OptimizeError> {
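        // Minimization: Newton's method applied to the stationarity condition
        // ∇f(x) = 0, with the Hessian as the tangent. With linear constraints
        // this becomes a sequential quadratic programming (SQP) scheme.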
        match equality_constraint {
            EqualityConstraint::Linear(constraint_matrix, constraint_rhs) => {
                let num_variables = initial_guess.num_entries();
                let num_constraints = constraint_rhs.len();
                let num_total = num_variables + num_constraints;
                let mut multipliers = Vector::ones(num_constraints) * self.abs_tol;
                let mut residual = Vector::zero(num_total);
                let mut solution = initial_guess;
                let mut tangent = SquareMatrix::zero(num_total);
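                // The constraint blocks −A and −Aᵀ of the KKT tangent are
                // constant, so fill them once, outside the iteration loop.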
                constraint_matrix
                    .iter()
                    .enumerate()
                    .for_each(|(i, constraint_matrix_i)| {
                        constraint_matrix_i.iter().enumerate().for_each(
                            |(j, constraint_matrix_ij)| {
                                tangent[i + num_variables][j] = -constraint_matrix_ij;
                                tangent[j][i + num_variables] = -constraint_matrix_ij;
                            },
                        )
                    });
                for step in 0..self.max_steps {
                    (jacobian(&solution)? - &multipliers * &constraint_matrix).fill_into_chained(
                        &constraint_rhs - &constraint_matrix * &solution,
                        &mut residual,
                    );
                    hessian(&solution)?.fill_into(&mut tangent);
                    if residual.norm() < self.abs_tol {
                        return Ok(solution);
                    } else {
                        println!(
                            "\x1b[1;93m({}) SQP step convexity is not verified.\x1b[0m",
                            step + 1
                        );
                        if let Some(ref band) = banded {
                            solution.decrement_from_chained(
                                &mut multipliers,
                                tangent.solve_lu_banded(&residual, band)?,
                            )
                        } else {
                            solution.decrement_from_chained(
                                &mut multipliers,
                                tangent.solve_lu(&residual)?,
                            )
                        }
                    }
                }
            }
            EqualityConstraint::None => {
                let mut residual;
                let mut solution = initial_guess;
                let mut tangent;
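                // Unconstrained: Newton on the gradient, with `residual / tangent`
                // computing H(x)⁻¹ ∇f(x).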
                for _ in 0..self.max_steps {
                    residual = jacobian(&solution)?;
                    tangent = hessian(&solution)?;
                    if residual.norm() < self.abs_tol {
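                        // A stationary point is only a minimum if the Hessian
                        // is positive-definite there.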
                        if self.check_minimum && !tangent.is_positive_definite() {
                            return Err(OptimizeError::NotMinimum(
                                format!("{}", solution),
                                format!("{:?}", &self),
                            ));
                        } else {
                            return Ok(solution);
                        }
                    } else {
                        solution -= residual / tangent;
                    }
                }
            }
        }
        Err(OptimizeError::MaximumStepsReached(
            self.max_steps,
            format!("{:?}", &self),
        ))
    }
}
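
// A minimal usage sketch (the closures and `initial_guess` below are
// hypothetical; the concrete `F`, `J`, `X` types come from conspire's
// tensor hierarchy):
//
//     let solver = NewtonRaphson::default();
//     let solution = solver.root(
//         |x| Ok(residual_at(x)),  // f(x); `residual_at` is hypothetical
//         |x| Ok(jacobian_at(x)),  // ∂f/∂x; `jacobian_at` is hypothetical
//         initial_guess,
//         EqualityConstraint::None,
//     )?;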