conspire/math/optimize/newton_raphson/mod.rs

1#[cfg(test)]
2mod test;
3
4use super::{
5    super::{Banded, Hessian, Jacobian, Matrix, Scalar, Solution, SquareMatrix, Tensor, Vector},
6    BacktrackingLineSearch, EqualityConstraint, FirstOrderRootFinding, LineSearch,
7    OptimizationError, SecondOrderOptimization,
8};
9use crate::ABS_TOL;
10use std::{
11    fmt::{self, Debug, Formatter},
12    ops::{Div, Mul},
13};
14
/// The Newton-Raphson method.
///
/// Iterates `x -= H(x)⁻¹ · J(x)` until the residual's infinity norm drops
/// below `abs_tol`, optionally damping each step with a line search.
pub struct NewtonRaphson {
    /// Absolute error tolerance, compared against the infinity norm of the residual.
    pub abs_tol: Scalar,
    /// Line search algorithm used to damp the Newton step.
    pub line_search: LineSearch,
    /// Maximum number of steps before giving up with `MaximumStepsReached`.
    pub max_steps: usize,
}
24
impl<J, X> BacktrackingLineSearch<J, X> for NewtonRaphson {
    /// Exposes the configured line-search algorithm to the
    /// `backtracking_line_search` default machinery.
    fn get_line_search(&self) -> &LineSearch {
        &self.line_search
    }
}
30
31impl Debug for NewtonRaphson {
32    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
33        write!(
34            f,
35            "NewtonRaphson {{ abs_tol: {:?}, line_search: {}, max_steps: {:?} }}",
36            self.abs_tol, self.line_search, self.max_steps
37        )
38    }
39}
40
41impl Default for NewtonRaphson {
42    fn default() -> Self {
43        Self {
44            abs_tol: ABS_TOL,
45            line_search: LineSearch::None,
46            max_steps: 25,
47        }
48    }
49}
50
51impl<F, J, X> FirstOrderRootFinding<F, J, X> for NewtonRaphson
52where
53    F: Jacobian,
54    for<'a> &'a F: Div<J, Output = X> + From<&'a X>,
55    J: Hessian,
56    X: Solution,
57    for<'a> &'a X: Mul<Scalar, Output = X>,
58    for<'a> &'a Matrix: Mul<&'a X, Output = Vector>,
59{
60    fn root(
61        &self,
62        function: impl Fn(&X) -> Result<F, String>,
63        jacobian: impl Fn(&X) -> Result<J, String>,
64        initial_guess: X,
65        equality_constraint: EqualityConstraint,
66    ) -> Result<X, OptimizationError> {
67        match equality_constraint {
68            EqualityConstraint::Fixed(indices) => constrained_fixed(
69                self,
70                |_: &X| panic!("No line search in root finding"),
71                function,
72                jacobian,
73                initial_guess,
74                None,
75                indices,
76            ),
77            EqualityConstraint::Linear(constraint_matrix, constraint_rhs) => constrained(
78                self,
79                |_: &X| panic!("No line search in root finding"),
80                function,
81                jacobian,
82                initial_guess,
83                None,
84                constraint_matrix,
85                constraint_rhs,
86            ),
87            EqualityConstraint::None => unconstrained(
88                self,
89                |_: &X| panic!("No line search in root finding"),
90                function,
91                jacobian,
92                initial_guess,
93            ),
94        }
95    }
96}
97
98impl<J, H, X> SecondOrderOptimization<Scalar, J, H, X> for NewtonRaphson
99where
100    H: Hessian,
101    J: Jacobian,
102    for<'a> &'a J: Div<H, Output = X> + From<&'a X>,
103    X: Solution,
104    for<'a> &'a X: Mul<Scalar, Output = X>,
105    for<'a> &'a Matrix: Mul<&'a X, Output = Vector>,
106{
107    fn minimize(
108        &self,
109        function: impl Fn(&X) -> Result<Scalar, String>,
110        jacobian: impl Fn(&X) -> Result<J, String>,
111        hessian: impl Fn(&X) -> Result<H, String>,
112        initial_guess: X,
113        equality_constraint: EqualityConstraint,
114        banded: Option<Banded>,
115    ) -> Result<X, OptimizationError> {
116        match match equality_constraint {
117            EqualityConstraint::Fixed(indices) => constrained_fixed(
118                self,
119                function,
120                jacobian,
121                hessian,
122                initial_guess,
123                banded,
124                indices,
125            ),
126            EqualityConstraint::Linear(constraint_matrix, constraint_rhs) => constrained(
127                self,
128                function,
129                jacobian,
130                hessian,
131                initial_guess,
132                banded,
133                constraint_matrix,
134                constraint_rhs,
135            ),
136            EqualityConstraint::None => {
137                unconstrained(self, function, jacobian, hessian, initial_guess)
138            }
139        } {
140            Ok(solution) => Ok(solution),
141            Err(error) => Err(OptimizationError::Upstream(
142                format!("{error}"),
143                format!("{self:?}"),
144            )),
145        }
146    }
147}
148
149fn unconstrained<J, H, X>(
150    newton_raphson: &NewtonRaphson,
151    function: impl Fn(&X) -> Result<Scalar, String>,
152    jacobian: impl Fn(&X) -> Result<J, String>,
153    hessian: impl Fn(&X) -> Result<H, String>,
154    initial_guess: X,
155) -> Result<X, OptimizationError>
156where
157    H: Hessian,
158    J: Jacobian,
159    for<'a> &'a J: Div<H, Output = X> + From<&'a X>,
160    X: Solution,
161    for<'a> &'a X: Mul<Scalar, Output = X>,
162{
163    let mut decrement;
164    let mut residual;
165    let mut solution = initial_guess;
166    let mut step_size;
167    let mut tangent;
168    for _ in 0..=newton_raphson.max_steps {
169        residual = jacobian(&solution)?;
170        if residual.norm_inf() < newton_raphson.abs_tol {
171            return Ok(solution);
172        } else {
173            tangent = hessian(&solution)?;
174            decrement = &residual / tangent;
175            step_size = newton_raphson.backtracking_line_search(
176                &function, &jacobian, &solution, &residual, &decrement, 1.0,
177            )?;
178            if step_size != 1.0 {
179                decrement *= step_size
180            }
181            solution -= decrement
182        }
183    }
184    Err(OptimizationError::MaximumStepsReached(
185        newton_raphson.max_steps,
186        format!("{:?}", &newton_raphson),
187    ))
188}
189
#[allow(clippy::too_many_arguments)]
/// Newton iteration with fixed-value equality constraints: the degrees of
/// freedom listed in `indices` are held at their `initial_guess` values and
/// removed from the solved system via the `retained` mask.
///
/// Converges when the reduced residual's infinity norm is below `abs_tol`;
/// errors with `MaximumStepsReached` after `max_steps` iterations.
fn constrained_fixed<J, H, X>(
    newton_raphson: &NewtonRaphson,
    function: impl Fn(&X) -> Result<Scalar, String>,
    jacobian: impl Fn(&X) -> Result<J, String>,
    hessian: impl Fn(&X) -> Result<H, String>,
    initial_guess: X,
    banded: Option<Banded>,
    indices: Vec<usize>,
) -> Result<X, OptimizationError>
where
    H: Hessian,
    J: Jacobian,
    for<'a> &'a J: From<&'a X>,
    X: Solution,
    for<'a> &'a X: Mul<Scalar, Output = X>,
{
    // Mask over all DOFs: true = free (solved for), false = fixed.
    let mut retained = vec![true; initial_guess.size()];
    indices.iter().for_each(|&index| retained[index] = false);
    let mut decrement;
    let mut residual;
    let mut solution = initial_guess;
    let mut step_size;
    let mut tangent;
    for _ in 0..=newton_raphson.max_steps {
        // Residual and tangent are restricted to the free DOFs only.
        residual = jacobian(&solution)?.retain_from(&retained);
        if residual.norm_inf() < newton_raphson.abs_tol {
            return Ok(solution);
        } else if let Some(ref band) = banded {
            // Banded LU solve when a bandwidth hint is available.
            tangent = hessian(&solution)?.retain_from(&retained);
            decrement = tangent.solve_lu_banded(&residual, band)?
        } else {
            tangent = hessian(&solution)?.retain_from(&retained);
            decrement = tangent.solve_lu(&residual)?
        }
        if !matches!(newton_raphson.line_search, LineSearch::None) {
            // The line search works in the full space: scatter the reduced
            // decrement into a full-size zero vector, then negate it —
            // presumably to orient it as the search direction expected by
            // `backtracking_line_search`; confirm against that trait.
            let mut decrement_full = &solution * 0.0;
            decrement_full.decrement_from_retained(&retained, &decrement);
            decrement_full *= -1.0;
            // NOTE(review): the full jacobian is recomputed here because
            // `residual` above holds only the retained entries.
            step_size = newton_raphson.backtracking_line_search(
                &function,
                &jacobian,
                &solution,
                &jacobian(&solution)?,
                &decrement_full,
                1.0,
            )?;
            // Scale the reduced decrement; `decrement_full` was only
            // needed by the line search itself.
            if step_size != 1.0 {
                decrement *= step_size
            }
        }
        // Apply the reduced decrement back onto the free DOFs.
        solution.decrement_from_retained(&retained, &decrement)
    }
    Err(OptimizationError::MaximumStepsReached(
        newton_raphson.max_steps,
        format!("{:?}", &newton_raphson),
    ))
}
248
#[allow(clippy::too_many_arguments)]
/// Newton iteration with linear equality constraints `A x = b`, solved as a
/// bordered (saddle-point) system in the solution variables and Lagrange
/// multipliers chained together.
///
/// The objective is unused (`_function`): line search is unsupported here
/// and panics, since it would require the exact penalty function.
fn constrained<J, H, X>(
    newton_raphson: &NewtonRaphson,
    _function: impl Fn(&X) -> Result<Scalar, String>,
    jacobian: impl Fn(&X) -> Result<J, String>,
    hessian: impl Fn(&X) -> Result<H, String>,
    initial_guess: X,
    banded: Option<Banded>,
    constraint_matrix: Matrix,
    constraint_rhs: Vector,
) -> Result<X, OptimizationError>
where
    H: Hessian,
    J: Jacobian,
    X: Solution,
    for<'a> &'a Matrix: Mul<&'a X, Output = Vector>,
{
    if !matches!(newton_raphson.line_search, LineSearch::None) {
        panic!("Line search needs the exact penalty function in constrained optimization.")
    }
    let mut decrement;
    let num_variables = initial_guess.size();
    let num_constraints = constraint_rhs.len();
    // Total system size: variables plus one multiplier per constraint.
    let num_total = num_variables + num_constraints;
    let mut multipliers = Vector::zero(num_constraints);
    let mut residual = Vector::zero(num_total);
    let mut solution = initial_guess;
    let mut tangent = SquareMatrix::zero(num_total);
    // The off-diagonal blocks (-A and its transpose) are constant, so they
    // are written into the bordered tangent once, before iterating.
    constraint_matrix
        .iter()
        .enumerate()
        .for_each(|(i, constraint_matrix_i)| {
            constraint_matrix_i
                .iter()
                .enumerate()
                .for_each(|(j, constraint_matrix_ij)| {
                    tangent[i + num_variables][j] = -constraint_matrix_ij;
                    tangent[j][i + num_variables] = -constraint_matrix_ij;
                })
        });
    for _ in 0..=newton_raphson.max_steps {
        // Stack the stationarity residual (gradient minus multiplier
        // contribution) with the constraint violation (b - A x).
        (jacobian(&solution)? - &multipliers * &constraint_matrix).fill_into_chained(
            &constraint_rhs - &constraint_matrix * &solution,
            &mut residual,
        );
        if residual.norm_inf() < newton_raphson.abs_tol {
            return Ok(solution);
        } else if let Some(ref band) = banded {
            // Refresh the Hessian block of the bordered tangent, then do a
            // banded LU solve when a bandwidth hint is available.
            hessian(&solution)?.fill_into(&mut tangent);
            decrement = tangent.solve_lu_banded(&residual, band)?
        } else {
            hessian(&solution)?.fill_into(&mut tangent);
            decrement = tangent.solve_lu(&residual)?
        }
        // Split the chained decrement across the solution and multipliers.
        solution.decrement_from_chained(&mut multipliers, decrement)
    }
    Err(OptimizationError::MaximumStepsReached(
        newton_raphson.max_steps,
        format!("{:?}", &newton_raphson),
    ))
}