conspire/math/optimize/newton_raphson/mod.rs

#[cfg(test)]
mod test;

use super::{
    super::{Banded, Hessian, Jacobian, Matrix, Scalar, Solution, SquareMatrix, Tensor, Vector},
    BacktrackingLineSearch, EqualityConstraint, FirstOrderRootFinding, LineSearch,
    OptimizationError, SecondOrderOptimization,
};
use crate::ABS_TOL;
use std::{
    fmt::{self, Debug, Formatter},
    ops::{Div, Mul},
};

/// The Newton-Raphson method.
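///
/// Iterates the update `x = x - H(x)^(-1) * J(x)`, where `J` is the residual
/// (or gradient) and `H` its tangent (or Hessian), optionally damped by a
/// backtracking line search.
///
/// A minimal root-finding sketch; the closures `residual` and `tangent` and
/// the initial guess `x0` are hypothetical, assumed to satisfy the trait
/// bounds below:
///
/// ```ignore
/// let solver = NewtonRaphson::default();
/// let root = solver.root(residual, tangent, x0, EqualityConstraint::None)?;
/// ```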
pub struct NewtonRaphson {
    /// Absolute error tolerance.
    pub abs_tol: Scalar,
    /// Line search algorithm.
    pub line_search: LineSearch,
    /// Maximum number of steps.
    pub max_steps: usize,
}

impl<J, X> BacktrackingLineSearch<J, X> for NewtonRaphson {
    fn get_line_search(&self) -> &LineSearch {
        &self.line_search
    }
}

impl Debug for NewtonRaphson {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "NewtonRaphson {{ abs_tol: {:?}, line_search: {}, max_steps: {:?} }}",
            self.abs_tol, self.line_search, self.max_steps
        )
    }
}

impl Default for NewtonRaphson {
    fn default() -> Self {
        Self {
            abs_tol: ABS_TOL,
            line_search: LineSearch::None,
            max_steps: 25,
        }
    }
}

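// Root finding drives the residual `function` to zero using its `jacobian`
// as the tangent, dispatching on the constraint type. Line search needs a
// scalar objective, which root finding does not have, so the closures passed
// below panic if one is ever evaluated.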
impl<F, J, X> FirstOrderRootFinding<F, J, X> for NewtonRaphson
where
    F: Jacobian,
    for<'a> &'a F: Div<J, Output = X> + From<&'a X>,
    J: Hessian,
    X: Solution,
    for<'a> &'a X: Mul<Scalar, Output = X>,
    for<'a> &'a Matrix: Mul<&'a X, Output = Vector>,
{
    fn root(
        &self,
        function: impl FnMut(&X) -> Result<F, String>,
        jacobian: impl FnMut(&X) -> Result<J, String>,
        initial_guess: X,
        equality_constraint: EqualityConstraint,
    ) -> Result<X, OptimizationError> {
        match equality_constraint {
            EqualityConstraint::Fixed(indices) => constrained_fixed(
                self,
                |_: &X| panic!("No line search in root finding"),
                function,
                jacobian,
                initial_guess,
                None,
                indices,
            ),
            EqualityConstraint::Linear(constraint_matrix, constraint_rhs) => constrained(
                self,
                |_: &X| panic!("No line search in root finding"),
                function,
                jacobian,
                initial_guess,
                None,
                constraint_matrix,
                constraint_rhs,
            ),
            EqualityConstraint::None => unconstrained(
                self,
                |_: &X| panic!("No line search in root finding"),
                function,
                jacobian,
                initial_guess,
            ),
        }
    }
}

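// Minimization applies Newton's method to the gradient (`jacobian`), using
// `hessian` as the tangent; errors from the underlying iterations are
// wrapped as `OptimizationError::Upstream` along with this solver's settings.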
impl<J, H, X> SecondOrderOptimization<Scalar, J, H, X> for NewtonRaphson
where
    H: Hessian,
    J: Jacobian,
    for<'a> &'a J: Div<H, Output = X> + From<&'a X>,
    X: Solution,
    for<'a> &'a X: Mul<Scalar, Output = X>,
    for<'a> &'a Matrix: Mul<&'a X, Output = Vector>,
{
    fn minimize(
        &self,
        function: impl FnMut(&X) -> Result<Scalar, String>,
        jacobian: impl FnMut(&X) -> Result<J, String>,
        hessian: impl FnMut(&X) -> Result<H, String>,
        initial_guess: X,
        equality_constraint: EqualityConstraint,
        banded: Option<Banded>,
    ) -> Result<X, OptimizationError> {
        match match equality_constraint {
            EqualityConstraint::Fixed(indices) => constrained_fixed(
                self,
                function,
                jacobian,
                hessian,
                initial_guess,
                banded,
                indices,
            ),
            EqualityConstraint::Linear(constraint_matrix, constraint_rhs) => constrained(
                self,
                function,
                jacobian,
                hessian,
                initial_guess,
                banded,
                constraint_matrix,
                constraint_rhs,
            ),
            EqualityConstraint::None => {
                unconstrained(self, function, jacobian, hessian, initial_guess)
            }
        } {
            Ok(solution) => Ok(solution),
            Err(error) => Err(OptimizationError::Upstream(
                format!("{error}"),
                format!("{self:?}"),
            )),
        }
    }
}

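// Unconstrained Newton iteration: solve H(x) * d = J(x) for the decrement d,
// optionally damp it with a backtracking line search, and update x = x - d
// until the residual norm falls below the absolute tolerance.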
fn unconstrained<J, H, X>(
    newton_raphson: &NewtonRaphson,
    mut function: impl FnMut(&X) -> Result<Scalar, String>,
    mut jacobian: impl FnMut(&X) -> Result<J, String>,
    mut hessian: impl FnMut(&X) -> Result<H, String>,
    initial_guess: X,
) -> Result<X, OptimizationError>
where
    H: Hessian,
    J: Jacobian,
    for<'a> &'a J: Div<H, Output = X> + From<&'a X>,
    X: Solution,
    for<'a> &'a X: Mul<Scalar, Output = X>,
{
    let mut decrement;
    let mut residual;
    let mut solution = initial_guess;
    let mut step_size;
    let mut tangent;
    for _ in 0..=newton_raphson.max_steps {
        residual = jacobian(&solution)?;
        if residual.norm_inf() < newton_raphson.abs_tol {
            return Ok(solution);
        } else {
            tangent = hessian(&solution)?;
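            // Dividing the residual by the tangent solves the Newton system
            // H * d = J for the decrement (via the `Div` bound on `&J`).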
            decrement = &residual / tangent;
            step_size = newton_raphson.backtracking_line_search(
                &mut function,
                &mut jacobian,
                &solution,
                &residual,
                &decrement,
                1.0,
            )?;
            if step_size != 1.0 {
                decrement *= step_size
            }
            solution -= decrement
        }
    }
    Err(OptimizationError::MaximumStepsReached(
        newton_raphson.max_steps,
        format!("{:?}", &newton_raphson),
    ))
}

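// Newton iteration with fixed degrees of freedom: the entries listed in
// `indices` are removed from the residual and tangent, the reduced system is
// solved, and the decrement is scattered back into the full solution.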
#[allow(clippy::too_many_arguments)]
fn constrained_fixed<J, H, X>(
    newton_raphson: &NewtonRaphson,
    mut function: impl FnMut(&X) -> Result<Scalar, String>,
    mut jacobian: impl FnMut(&X) -> Result<J, String>,
    mut hessian: impl FnMut(&X) -> Result<H, String>,
    initial_guess: X,
    banded: Option<Banded>,
    indices: Vec<usize>,
) -> Result<X, OptimizationError>
where
    H: Hessian,
    J: Jacobian,
    for<'a> &'a J: From<&'a X>,
    X: Solution,
    for<'a> &'a X: Mul<Scalar, Output = X>,
{
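    // Mark the fixed degrees of freedom as not retained; the Newton system
    // is assembled and solved over the retained entries only.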
    let mut retained = vec![true; initial_guess.size()];
    indices.iter().for_each(|&index| retained[index] = false);
    let mut decrement;
    let mut residual;
    let mut solution = initial_guess;
    let mut step_size;
    let mut tangent;
    for _ in 0..=newton_raphson.max_steps {
        residual = jacobian(&solution)?.retain_from(&retained);
        if residual.norm_inf() < newton_raphson.abs_tol {
            return Ok(solution);
        } else if let Some(ref band) = banded {
            tangent = hessian(&solution)?.retain_from(&retained);
            decrement = tangent.solve_lu_banded(&residual, band)?
        } else {
            tangent = hessian(&solution)?.retain_from(&retained);
            decrement = tangent.solve_lu(&residual)?
        }
        if !matches!(newton_raphson.line_search, LineSearch::None) {
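            // Expand the reduced decrement to full size (zero at the fixed
            // entries) so the line search samples along the true search
            // direction.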
            let jac = jacobian(&solution)?;
            let mut decrement_full = &solution * 0.0;
            decrement_full.decrement_from_retained(&retained, &decrement);
            decrement_full *= -1.0;
            step_size = newton_raphson.backtracking_line_search(
                &mut function,
                &mut jacobian,
                &solution,
                &jac,
                &decrement_full,
                1.0,
            )?;
            if step_size != 1.0 {
                decrement *= step_size
            }
        }
        solution.decrement_from_retained(&retained, &decrement)
    }
    Err(OptimizationError::MaximumStepsReached(
        newton_raphson.max_steps,
        format!("{:?}", &newton_raphson),
    ))
}

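// Newton iteration with a linear equality constraint A * x = b, enforced via
// Lagrange multipliers: each step solves the saddle-point (KKT) system
//
//     [ H   -A^T ] [ dx ]   [ J - A^T * multipliers ]
//     [ -A    0  ] [ dl ] = [ b - A * x             ]
//
// for the chained decrement of the solution and the multipliers.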
#[allow(clippy::too_many_arguments)]
fn constrained<J, H, X>(
    newton_raphson: &NewtonRaphson,
    _function: impl FnMut(&X) -> Result<Scalar, String>,
    mut jacobian: impl FnMut(&X) -> Result<J, String>,
    mut hessian: impl FnMut(&X) -> Result<H, String>,
    initial_guess: X,
    banded: Option<Banded>,
    constraint_matrix: Matrix,
    constraint_rhs: Vector,
) -> Result<X, OptimizationError>
where
    H: Hessian,
    J: Jacobian,
    X: Solution,
    for<'a> &'a Matrix: Mul<&'a X, Output = Vector>,
{
    if !matches!(newton_raphson.line_search, LineSearch::None) {
        panic!("Line search needs the exact penalty function in constrained optimization.")
    }
    let mut decrement;
    let num_variables = initial_guess.size();
    let num_constraints = constraint_rhs.len();
    let num_total = num_variables + num_constraints;
    let mut multipliers = Vector::zero(num_constraints);
    let mut residual = Vector::zero(num_total);
    let mut solution = initial_guess;
    let mut tangent = SquareMatrix::zero(num_total);
    constraint_matrix
        .iter()
        .enumerate()
        .for_each(|(i, constraint_matrix_i)| {
            constraint_matrix_i
                .iter()
                .enumerate()
                .for_each(|(j, constraint_matrix_ij)| {
                    tangent[i + num_variables][j] = -constraint_matrix_ij;
                    tangent[j][i + num_variables] = -constraint_matrix_ij;
                })
        });
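    // The constant constraint blocks -A and -A^T are filled once above; only
    // the Hessian block is refreshed each iteration via `fill_into`.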
    for _ in 0..=newton_raphson.max_steps {
        (jacobian(&solution)? - &multipliers * &constraint_matrix).fill_into_chained(
            &constraint_rhs - &constraint_matrix * &solution,
            &mut residual,
        );
        if residual.norm_inf() < newton_raphson.abs_tol {
            return Ok(solution);
        } else if let Some(ref band) = banded {
            hessian(&solution)?.fill_into(&mut tangent);
            decrement = tangent.solve_lu_banded(&residual, band)?
        } else {
            hessian(&solution)?.fill_into(&mut tangent);
            decrement = tangent.solve_lu(&residual)?
        }
        solution.decrement_from_chained(&mut multipliers, decrement)
    }
    Err(OptimizationError::MaximumStepsReached(
        newton_raphson.max_steps,
        format!("{:?}", &newton_raphson),
    ))
}