conspire/math/optimize/newton_raphson/mod.rs

#[cfg(test)]
mod test;

use super::{
    super::{
        Banded, Hessian, Jacobian, Matrix, Scalar, Solution, SquareMatrix, Tensor, TensorVec,
        Vector,
    },
    BacktrackingLineSearch, EqualityConstraint, FirstOrderRootFinding, LineSearch,
    OptimizationError, SecondOrderOptimization,
};
use crate::ABS_TOL;
use std::{
    fmt::{self, Debug, Formatter},
    ops::{Div, Mul},
};

/// The Newton-Raphson method.
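///
/// A minimal construction sketch (illustrative, not taken from the crate's
/// tests; the tolerance value is a hypothetical stand-in for `crate::ABS_TOL`,
/// which the `Default` impl below uses):
///
/// ```ignore
/// let solver = NewtonRaphson {
///     abs_tol: 1e-10, // hypothetical tolerance
///     line_search: LineSearch::None,
///     max_steps: 25,
/// };
/// ```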
pub struct NewtonRaphson {
    /// Absolute error tolerance.
    pub abs_tol: Scalar,
    /// Line search algorithm.
    pub line_search: LineSearch,
    /// Maximum number of steps.
    pub max_steps: usize,
}

impl<J, X> BacktrackingLineSearch<J, X> for NewtonRaphson {
    fn get_line_search(&self) -> &LineSearch {
        &self.line_search
    }
}

impl Debug for NewtonRaphson {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "NewtonRaphson {{ abs_tol: {:?}, line_search: {}, max_steps: {:?} }}",
            self.abs_tol, self.line_search, self.max_steps
        )
    }
}

impl Default for NewtonRaphson {
    fn default() -> Self {
        Self {
            abs_tol: ABS_TOL,
            line_search: LineSearch::None,
            max_steps: 25,
        }
    }
}

impl<F, J, X> FirstOrderRootFinding<F, J, X> for NewtonRaphson
where
    F: Jacobian,
    for<'a> &'a F: Div<J, Output = X> + From<&'a X>,
    J: Hessian,
    X: Solution,
    for<'a> &'a X: Mul<Scalar, Output = X>,
    for<'a> &'a Matrix: Mul<&'a X, Output = Vector>,
{
    fn root(
        &self,
        function: impl Fn(&X) -> Result<F, String>,
        jacobian: impl Fn(&X) -> Result<J, String>,
        initial_guess: X,
        equality_constraint: EqualityConstraint,
    ) -> Result<X, OptimizationError> {
        // Root finding reuses the optimization helpers below with shifted
        // roles: the residual `function` acts as the Jacobian and its
        // derivative `jacobian` as the Hessian. The objective closure panics
        // if invoked, since root finding has no scalar objective to search.
        match equality_constraint {
            EqualityConstraint::Fixed(indices) => constrained_fixed(
                self,
                |_: &X| panic!("No line search in root finding"),
                function,
                jacobian,
                initial_guess,
                None,
                indices,
            ),
            EqualityConstraint::Linear(constraint_matrix, constraint_rhs) => constrained(
                self,
                |_: &X| panic!("No line search in root finding"),
                function,
                jacobian,
                initial_guess,
                None,
                constraint_matrix,
                constraint_rhs,
            ),
            EqualityConstraint::None => unconstrained(
                self,
                |_: &X| panic!("No line search in root finding"),
                function,
                jacobian,
                initial_guess,
            ),
        }
    }
}
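
// A hypothetical root-finding sketch; `residual`, `residual_tangent`, and
// `initial_guess` are illustrative stand-ins, not items from this crate:
//
//     let solver = NewtonRaphson::default();
//     let root = solver.root(
//         |x| Ok(residual(x)),         // residual F(x) driven to zero
//         |x| Ok(residual_tangent(x)), // its derivative dF/dx
//         initial_guess,
//         EqualityConstraint::None,
//     )?;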

impl<J, H, X> SecondOrderOptimization<Scalar, J, H, X> for NewtonRaphson
where
    H: Hessian,
    J: Jacobian,
    for<'a> &'a J: Div<H, Output = X> + From<&'a X>,
    X: Solution,
    for<'a> &'a X: Mul<Scalar, Output = X>,
    for<'a> &'a Matrix: Mul<&'a X, Output = Vector>,
{
    fn minimize(
        &self,
        function: impl Fn(&X) -> Result<Scalar, String>,
        jacobian: impl Fn(&X) -> Result<J, String>,
        hessian: impl Fn(&X) -> Result<H, String>,
        initial_guess: X,
        equality_constraint: EqualityConstraint,
        banded: Option<Banded>,
    ) -> Result<X, OptimizationError> {
        match equality_constraint {
            EqualityConstraint::Fixed(indices) => constrained_fixed(
                self,
                function,
                jacobian,
                hessian,
                initial_guess,
                banded,
                indices,
            ),
            EqualityConstraint::Linear(constraint_matrix, constraint_rhs) => constrained(
                self,
                function,
                jacobian,
                hessian,
                initial_guess,
                banded,
                constraint_matrix,
                constraint_rhs,
            ),
            EqualityConstraint::None => {
                unconstrained(self, function, jacobian, hessian, initial_guess)
            }
        }
        // Annotate any upstream failure with this solver's configuration.
        .map_err(|error| {
            OptimizationError::Upstream(format!("{error}"), format!("{self:?}"))
        })
    }
}
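
// A hypothetical minimization sketch; `energy`, `gradient`, `stiffness`, and
// `initial_guess` are illustrative stand-ins for whatever a caller supplies,
// not items from this crate:
//
//     let solver = NewtonRaphson::default();
//     let minimizer = solver.minimize(
//         |x| Ok(energy(x)),
//         |x| Ok(gradient(x)),
//         |x| Ok(stiffness(x)),
//         initial_guess,
//         EqualityConstraint::Fixed(vec![0, 1]), // pin the first two entries
//         None,                                  // no banded-solver hint
//     )?;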

// `unconstrained` performs plain Newton iteration: solve the linearized
// system for the decrement, optionally shorten the step by backtracking line
// search, and subtract it from the current iterate.
fn unconstrained<J, H, X>(
    newton_raphson: &NewtonRaphson,
    function: impl Fn(&X) -> Result<Scalar, String>,
    jacobian: impl Fn(&X) -> Result<J, String>,
    hessian: impl Fn(&X) -> Result<H, String>,
    initial_guess: X,
) -> Result<X, OptimizationError>
where
    H: Hessian,
    J: Jacobian,
    for<'a> &'a J: Div<H, Output = X> + From<&'a X>,
    X: Solution,
    for<'a> &'a X: Mul<Scalar, Output = X>,
{
    let mut decrement;
    let mut residual;
    let mut solution = initial_guess;
    let mut step_size;
    let mut tangent;
    for _ in 0..newton_raphson.max_steps {
        residual = jacobian(&solution)?;
        tangent = hessian(&solution)?;
        if residual.norm_inf() < newton_raphson.abs_tol {
            return Ok(solution);
        } else {
            // Newton decrement: solve (Hessian) * decrement = (Jacobian).
            decrement = &residual / tangent;
            step_size = newton_raphson.backtracking_line_search(
                &function, &jacobian, &solution, &residual, &decrement, 1.0,
            )?;
            // Scale only when the line search shortened the full step.
            if step_size != 1.0 {
                decrement *= step_size
            }
            solution -= decrement
        }
    }
    Err(OptimizationError::MaximumStepsReached(
        newton_raphson.max_steps,
        format!("{:?}", &newton_raphson),
    ))
}

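// `constrained_fixed` holds the entries listed in `indices` at their initial
// values: the corresponding rows and columns are dropped from the residual
// and tangent, Newton updates are computed on the retained subsystem, and the
// full-length decrement is reassembled only when a line search needs it.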
#[allow(clippy::too_many_arguments)]
fn constrained_fixed<J, H, X>(
    newton_raphson: &NewtonRaphson,
    function: impl Fn(&X) -> Result<Scalar, String>,
    jacobian: impl Fn(&X) -> Result<J, String>,
    hessian: impl Fn(&X) -> Result<H, String>,
    initial_guess: X,
    banded: Option<Banded>,
    indices: Vec<usize>,
) -> Result<X, OptimizationError>
where
    H: Hessian,
    J: Jacobian,
    for<'a> &'a J: From<&'a X>,
    X: Solution,
    for<'a> &'a X: Mul<Scalar, Output = X>,
{
    // Mark every degree of freedom as retained except the fixed indices.
    let mut retained = vec![true; initial_guess.num_entries()];
    indices.iter().for_each(|&index| retained[index] = false);
    let mut decrement;
    let mut residual;
    let mut solution = initial_guess;
    let mut step_size;
    let mut tangent;
    for _ in 0..newton_raphson.max_steps {
        residual = jacobian(&solution)?.retain_from(&retained);
        tangent = hessian(&solution)?.retain_from(&retained);
        if residual.norm_inf() < newton_raphson.abs_tol {
            return Ok(solution);
        } else if let Some(ref band) = banded {
            decrement = tangent.solve_lu_banded(&residual, band)?
        } else {
            decrement = tangent.solve_lu(&residual)?
        }
        if !matches!(newton_raphson.line_search, LineSearch::None) {
            // Expand the retained decrement back to full length so the line
            // search can evaluate trial points in the original variables.
            let mut decrement_full = &solution * 0.0;
            decrement_full.decrement_from_retained(&retained, &decrement);
            decrement_full *= -1.0;
            step_size = newton_raphson.backtracking_line_search(
                &function,
                &jacobian,
                &solution,
                &jacobian(&solution)?,
                &decrement_full,
                1.0,
            )?;
            if step_size != 1.0 {
                decrement *= step_size
            }
        }
        solution.decrement_from_retained(&retained, &decrement)
    }
    Err(OptimizationError::MaximumStepsReached(
        newton_raphson.max_steps,
        format!("{:?}", &newton_raphson),
    ))
}

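// `constrained` enforces the linear constraint A x = b with Lagrange
// multipliers λ. Reading off the assembly below, each Newton step solves the
// KKT-style system
//
//     [  H  -Aᵀ ] [ dx ]   [ ∇f - Aᵀ λ ]
//     [ -A   0  ] [ dλ ] = [  b - A x  ]
//
// where H is the Hessian and A the constraint matrix; the solution and the
// multipliers are then decremented by the chained solution vector.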
#[allow(clippy::too_many_arguments)]
fn constrained<J, H, X>(
    newton_raphson: &NewtonRaphson,
    _function: impl Fn(&X) -> Result<Scalar, String>,
    jacobian: impl Fn(&X) -> Result<J, String>,
    hessian: impl Fn(&X) -> Result<H, String>,
    initial_guess: X,
    banded: Option<Banded>,
    constraint_matrix: Matrix,
    constraint_rhs: Vector,
) -> Result<X, OptimizationError>
where
    H: Hessian,
    J: Jacobian,
    X: Solution,
    for<'a> &'a Matrix: Mul<&'a X, Output = Vector>,
{
    if !matches!(newton_raphson.line_search, LineSearch::None) {
        panic!("Line search needs the exact penalty function in constrained optimization.")
    }
    let mut decrement;
    let num_variables = initial_guess.num_entries();
    let num_constraints = constraint_rhs.len();
    let num_total = num_variables + num_constraints;
    let mut multipliers = Vector::zero(num_constraints);
    let mut residual = Vector::zero(num_total);
    let mut solution = initial_guess;
    let mut tangent = SquareMatrix::zero(num_total);
    // The constraint blocks of the tangent are constant, so fill them once:
    // -A goes in the lower-left block and -Aᵀ in the upper-right block.
    constraint_matrix
        .iter()
        .enumerate()
        .for_each(|(i, constraint_matrix_i)| {
            constraint_matrix_i
                .iter()
                .enumerate()
                .for_each(|(j, constraint_matrix_ij)| {
                    tangent[i + num_variables][j] = -constraint_matrix_ij;
                    tangent[j][i + num_variables] = -constraint_matrix_ij;
                })
        });
    for _ in 0..newton_raphson.max_steps {
        // Assemble the residual [∇f - Aᵀλ, b - A x] and refresh the Hessian
        // block of the tangent at the current iterate.
        (jacobian(&solution)? - &multipliers * &constraint_matrix).fill_into_chained(
            &constraint_rhs - &constraint_matrix * &solution,
            &mut residual,
        );
        hessian(&solution)?.fill_into(&mut tangent);
        if residual.norm_inf() < newton_raphson.abs_tol {
            return Ok(solution);
        } else if let Some(ref band) = banded {
            decrement = tangent.solve_lu_banded(&residual, band)?
        } else {
            decrement = tangent.solve_lu(&residual)?
        }
        solution.decrement_from_chained(&mut multipliers, decrement)
    }
    Err(OptimizationError::MaximumStepsReached(
        newton_raphson.max_steps,
        format!("{:?}", &newton_raphson),
    ))
}