// conspire/math/integrate/backward_euler/mod.rs
#[cfg(test)]
2mod test;
3
4use super::{
5 super::{
6 Hessian, Jacobian, Matrix, Solution, Tensor, TensorArray, TensorRank0, TensorVec, Vector,
7 interpolate::InterpolateSolution,
8 optimize::{
9 EqualityConstraint, FirstOrderRootFinding, NewtonRaphson, Optimization,
10 ZerothOrderRootFinding,
11 },
12 },
13 Implicit, IntegrationError,
14};
15use std::ops::{Div, Mul, Sub};
16
/// Backward (implicit) Euler time integrator.
///
/// Each step advances the solution by driving the implicit residual
/// `y_{n+1} - y_n - f(t_{n+1}, y_{n+1}) * dt` to zero with the configured
/// root-finding algorithm (see the `Implicit::integrate` implementation below).
#[derive(Debug)]
pub struct BackwardEuler {
    // Root-finding/optimization algorithm used for the implicit solve at each step.
    pub opt_alg: Optimization,
}
25
26impl Default for BackwardEuler {
27 fn default() -> Self {
28 Self {
29 opt_alg: Optimization::NewtonRaphson(NewtonRaphson {
30 check_minimum: false,
31 ..Default::default()
32 }),
33 }
34 }
35}
36
37impl<Y, J, U> Implicit<Y, J, U> for BackwardEuler
38where
39 Self: InterpolateSolution<Y, U>,
40 Y: Jacobian + Solution + Div<J, Output = Y>,
41 for<'a> &'a Y: Mul<TensorRank0, Output = Y> + Sub<&'a Y, Output = Y>,
42 J: Hessian + Tensor + TensorArray,
43 U: TensorVec<Item = Y>,
44 Vector: From<Y>,
45 for<'a> &'a Matrix: Mul<&'a Y, Output = Vector>, {
47 fn integrate(
48 &self,
49 function: impl Fn(TensorRank0, &Y) -> Result<Y, IntegrationError>,
50 jacobian: impl Fn(TensorRank0, &Y) -> Result<J, IntegrationError>,
51 time: &[TensorRank0],
52 initial_condition: Y,
53 ) -> Result<(Vector, U, U), IntegrationError> {
54 if time.len() < 2 {
55 return Err(IntegrationError::LengthTimeLessThanTwo);
56 } else if time[0] >= time[time.len() - 1] {
57 return Err(IntegrationError::InitialTimeNotLessThanFinalTime);
58 }
59 let mut index = 0;
60 let mut t = time[0];
61 let mut dt;
62 let identity = J::identity();
63 let mut t_sol = Vector::zero(0);
64 t_sol.push(time[0]);
65 let mut t_trial;
66 let mut y = initial_condition.clone();
67 let mut y_sol = U::zero(0);
68 y_sol.push(initial_condition.clone());
69 let mut dydt_sol = U::zero(0);
70 dydt_sol.push(function(t, &y.clone())?);
71 let mut y_trial;
72 while t < time[time.len() - 1] {
73 t_trial = time[index + 1];
74 dt = t_trial - t;
75 y_trial = match &self.opt_alg {
76 Optimization::GradientDescent(gradient_descent) => gradient_descent.root(
77 |y_trial: &Y| Ok(y_trial - &y - &(&function(t_trial, y_trial)? * dt)),
78 y.clone(),
79 EqualityConstraint::None,
80 )?,
81 Optimization::NewtonRaphson(newton_raphson) => newton_raphson.root(
82 |y_trial: &Y| Ok(y_trial - &y - &(&function(t_trial, y_trial)? * dt)),
83 |y_trial: &Y| Ok(jacobian(t_trial, y_trial)? * -dt + &identity),
84 y.clone(),
85 EqualityConstraint::None,
86 )?,
87 };
88 t = t_trial;
89 y = y_trial;
90 t_sol.push(t);
91 y_sol.push(y.clone());
92 dydt_sol.push(function(t, &y)?);
93 index += 1;
94 }
95 Ok((t_sol, y_sol, dydt_sol))
96 }
97}
98
impl<Y, U> InterpolateSolution<Y, U> for BackwardEuler
where
    Y: Jacobian + Solution + TensorArray,
    for<'a> &'a Y: Mul<TensorRank0, Output = Y> + Sub<&'a Y, Output = Y>,
    U: TensorVec<Item = Y>,
    Vector: From<Y>,
{
    /// Interpolation of the backward Euler solution onto `_time` is not yet
    /// implemented; calling this method panics unconditionally. The impl
    /// exists only to satisfy the `Self: InterpolateSolution<Y, U>` bound on
    /// the `Implicit` implementation above.
    fn interpolate(
        &self,
        _time: &Vector,
        _tp: &Vector,
        _yp: &U,
        _function: impl Fn(TensorRank0, &Y) -> Result<Y, IntegrationError>,
    ) -> Result<(U, U), IntegrationError> {
        unimplemented!()
    }
}
115}