conspire/math/matrix/square/
mod.rs1#[cfg(test)]
2mod test;
3
4#[cfg(test)]
5use crate::math::test::ErrorTensor;
6
7use crate::{
8 ABS_TOL,
9 math::{
10 Hessian, Rank2, Tensor, TensorRank0, TensorRank2Vec2D, TensorVec, Vector,
11 write_tensor_rank_0,
12 },
13};
14use std::{
15 collections::VecDeque,
16 fmt::{self, Display, Formatter},
17 ops::{Add, AddAssign, Div, DivAssign, Index, IndexMut, Mul, MulAssign, Sub, SubAssign},
18 vec::IntoIter,
19};
20
/// A bandwidth-reducing reordering of a symmetric square sparsity pattern.
pub struct Banded {
    // Bandwidth of the reordered pattern: the maximum |i - j| (in new indices)
    // over all nonzero entries.
    bandwidth: usize,
    // inverse[old] is the new (reordered) index of original index `old`.
    inverse: Vec<usize>,
    // mapping[new] is the original index of new (reordered) index `new`.
    mapping: Vec<usize>,
}
27
impl Banded {
    /// Returns the new (reordered) index of original index `old`.
    pub fn map(&self, old: usize) -> usize {
        self.inverse[old]
    }
    /// Returns the original index of new (reordered) index `new`.
    pub fn old(&self, new: usize) -> usize {
        self.mapping[new]
    }
    /// Returns the bandwidth of the reordered pattern.
    pub fn width(&self) -> usize {
        self.bandwidth
    }
}
39
40impl From<Vec<Vec<bool>>> for Banded {
41 fn from(structure: Vec<Vec<bool>>) -> Self {
42 let num = structure.len();
43 structure.iter().enumerate().for_each(|(i, row_i)| {
44 assert_eq!(row_i.len(), num);
45 row_i
46 .iter()
47 .zip(structure.iter())
48 .for_each(|(entry_ij, row_j)| assert_eq!(&row_j[i], entry_ij))
49 });
50 let mut adj_list = vec![Vec::new(); num];
51 for i in 0..num {
52 for j in 0..num {
53 if structure[i][j] && i != j {
54 adj_list[i].push(j);
55 }
56 }
57 }
58 let start_vertex = (0..num)
59 .min_by_key(|&i| adj_list[i].len())
60 .expect("Matrix must have at least one entry.");
61 let mut visited = vec![false; num];
62 let mut mapping = Vec::new();
63 let mut queue = VecDeque::new();
64 queue.push_back(start_vertex);
65 visited[start_vertex] = true;
66 while let Some(vertex) = queue.pop_front() {
67 mapping.push(vertex);
68 let mut neighbors: Vec<usize> = adj_list[vertex]
69 .iter()
70 .filter(|&&neighbor| !visited[neighbor])
71 .copied()
72 .collect();
73 neighbors.sort_by_key(|&neighbor| adj_list[neighbor].len());
74 for neighbor in neighbors {
75 visited[neighbor] = true;
76 queue.push_back(neighbor);
77 }
78 }
79 mapping.reverse();
80 let mut inverse = vec![0; num];
81 mapping
82 .iter()
83 .enumerate()
84 .for_each(|(new, &old)| inverse[old] = new);
85 let mut bandwidth = 0;
86 (0..num).for_each(|i| {
87 (i + 1..num).for_each(|j| {
88 if structure[mapping[i]][mapping[j]] {
89 bandwidth = bandwidth.max(j - i)
90 }
91 })
92 });
93 Self {
94 bandwidth,
95 mapping,
96 inverse,
97 }
98 }
99}
100
/// Errors that can arise from square-matrix operations.
#[derive(Debug, PartialEq)]
pub enum SquareMatrixError {
    /// The matrix was detected to be singular (a pivot fell below `ABS_TOL`).
    Singular,
}
106
/// A dynamically-sized square matrix stored as a vector of row vectors.
#[derive(Clone, Debug, PartialEq)]
pub struct SquareMatrix(Vec<Vector>);
110
111impl SquareMatrix {
112 pub fn solve_lu(&self, b: &Vector) -> Result<Vector, SquareMatrixError> {
189 let n = self.len();
190 let mut p: Vec<usize> = (0..n).collect();
191 let mut factor;
192 let mut lu = self.clone();
193 let mut max_row;
194 let mut max_val;
195 let mut pivot;
196 for i in 0..n {
197 max_row = i;
198 max_val = lu[max_row][i].abs();
199 for k in i + 1..n {
200 if lu[k][i].abs() > max_val {
201 max_row = k;
202 max_val = lu[max_row][i].abs();
203 }
204 }
205 if max_row != i {
206 lu.0.swap(i, max_row);
207 p.swap(i, max_row);
208 }
209 pivot = lu[i][i];
210 if pivot.abs() < ABS_TOL {
211 return Err(SquareMatrixError::Singular);
212 }
213 for j in i + 1..n {
214 if lu[j][i] != 0.0 {
215 lu[j][i] /= pivot;
216 factor = lu[j][i];
217 for k in i + 1..n {
218 lu[j][k] -= factor * lu[i][k];
219 }
220 }
221 }
222 }
223 let mut x: Vector = p.into_iter().map(|p_i| b[p_i]).collect();
224 forward_substitution(&mut x, &lu);
225 backward_substitution(&mut x, &lu);
226 Ok(x)
227 }
228 pub fn solve_lu_banded(
230 &self,
231 b: &Vector,
232 banded: &Banded,
233 ) -> Result<Vector, SquareMatrixError> {
234 let bandwidth = banded.width();
235 let mut bandwidth_updated;
236 let n = self.len();
237 let mut p: Vec<usize> = (0..n).collect();
238 let mut end;
239 let mut factor;
240 let mut max_row;
241 let mut max_val;
242 let mut pivot;
243 let mut rearr: Self = (0..n)
244 .map(|i| (0..n).map(|j| self[banded.old(i)][banded.old(j)]).collect())
245 .collect();
246 for i in 0..n {
247 end = n.min(i + 1 + bandwidth);
248 pivot = rearr[i][i];
249 if pivot.abs() < ABS_TOL {
250 max_row = i;
251 max_val = rearr[max_row][i].abs();
252 for k in i + 1..end {
253 if rearr[k][i].abs() > max_val {
254 max_row = k;
255 max_val = rearr[max_row][i].abs();
256 }
257 }
258 if max_row != i {
259 rearr.0.swap(i, max_row);
260 p.swap(i, max_row);
261 pivot = rearr[i][i];
262 if pivot.abs() < ABS_TOL {
263 return Err(SquareMatrixError::Singular);
264 }
265 }
266 }
267 bandwidth_updated = bandwidth;
268 (i + 1 + bandwidth..n).for_each(|j| {
269 if rearr[i][j] != 0.0 {
270 bandwidth_updated = bandwidth_updated.max(j - i)
271 }
272 });
273 end = n.min(i + 1 + bandwidth_updated);
274 for j in i + 1..end {
275 if rearr[j][i] != 0.0 {
276 rearr[j][i] /= pivot;
277 factor = rearr[j][i];
278 for k in i + 1..end {
279 rearr[j][k] -= factor * rearr[i][k];
280 }
281 }
282 }
283 }
284 let mut x: Vector = p.into_iter().map(|p_i| b[banded.old(p_i)]).collect();
285 forward_substitution(&mut x, &rearr);
286 backward_substitution(&mut x, &rearr);
287 Ok((0..n).map(|i| x[banded.map(i)]).collect())
288 }
289}
290
291fn forward_substitution(x: &mut Vector, a: &SquareMatrix) {
292 a.iter().enumerate().for_each(|(i, a_i)| {
293 x[i] -= a_i
294 .iter()
295 .take(i)
296 .zip(x.iter().take(i))
297 .map(|(a_ij, x_j)| a_ij * x_j)
298 .sum::<TensorRank0>()
299 })
300}
301
302fn backward_substitution(x: &mut Vector, a: &SquareMatrix) {
303 a.0.iter().enumerate().rev().for_each(|(i, a_i)| {
304 x[i] -= a_i
305 .iter()
306 .skip(i + 1)
307 .zip(x.iter().skip(i + 1))
308 .map(|(a_ij, x_j)| a_ij * x_j)
309 .sum::<TensorRank0>();
310 x[i] /= a_i[i];
311 })
312}
313
#[cfg(test)]
impl ErrorTensor for SquareMatrix {
    /// Counts entries whose relative difference from `comparator` is at least
    /// `epsilon`, ignoring entries where both magnitudes are below `epsilon`;
    /// returns `Some((true, count))` when any are found, `None` otherwise.
    fn error_fd(&self, comparator: &Self, epsilon: &TensorRank0) -> Option<(bool, usize)> {
        let mut error_count = 0;
        for (self_i, comparator_i) in self.iter().zip(comparator.iter()) {
            for (&self_ij, &comparator_ij) in self_i.iter().zip(comparator_i.iter()) {
                let relative = (self_ij / comparator_ij - 1.0).abs();
                if relative >= *epsilon
                    && (self_ij.abs() >= *epsilon || comparator_ij.abs() >= *epsilon)
                {
                    error_count += 1;
                }
            }
        }
        if error_count > 0 {
            Some((true, error_count))
        } else {
            None
        }
    }
}
338
impl Display for SquareMatrix {
    /// Pretty-prints the matrix over multiple lines, using ANSI cursor-control
    /// escapes so each row lines up under the opening bracket.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        // ESC[s saves the cursor position so later rows can be aligned to it.
        write!(f, "\x1B[s")?;
        write!(f, "[[")?;
        self.iter().enumerate().try_for_each(|(i, row)| {
            row.iter()
                .try_for_each(|entry| write_tensor_rank_0(f, entry))?;
            if i + 1 < self.len() {
                // ESC[2D backs up over the trailing separator before closing
                // the row; ESC[u restores the saved position and ESC[{n}B
                // moves down n lines to start the next row.
                writeln!(f, "\x1B[2D],")?;
                write!(f, "\x1B[u")?;
                write!(f, "\x1B[{}B [", i + 1)?;
            }
            Ok(())
        })?;
        // Back up over the final separator and close both brackets.
        write!(f, "\x1B[2D]]")
    }
}
356
357impl<const D: usize, const I: usize, const J: usize> From<TensorRank2Vec2D<D, I, J>>
358 for SquareMatrix
359{
360 fn from(tensor_rank_2_vec_2d: TensorRank2Vec2D<D, I, J>) -> Self {
361 let mut square_matrix = Self::zero(tensor_rank_2_vec_2d.len() * D);
362 tensor_rank_2_vec_2d
363 .iter()
364 .enumerate()
365 .for_each(|(a, entry_a)| {
366 entry_a.iter().enumerate().for_each(|(b, entry_ab)| {
367 entry_ab.iter().enumerate().for_each(|(i, entry_ab_i)| {
368 entry_ab_i.iter().enumerate().for_each(|(j, entry_ab_ij)| {
369 square_matrix[D * a + i][D * b + j] = *entry_ab_ij
370 })
371 })
372 })
373 });
374 square_matrix
375 }
376}
377
378impl FromIterator<Vector> for SquareMatrix {
379 fn from_iter<Ii: IntoIterator<Item = Vector>>(into_iterator: Ii) -> Self {
380 Self(Vec::from_iter(into_iterator))
381 }
382}
383
impl Index<usize> for SquareMatrix {
    type Output = Vector;
    /// Returns a reference to row `index`; panics if out of bounds.
    fn index(&self, index: usize) -> &Self::Output {
        &self.0[index]
    }
}
390
impl IndexMut<usize> for SquareMatrix {
    /// Returns a mutable reference to row `index`; panics if out of bounds.
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        &mut self.0[index]
    }
}
396
397impl Hessian for SquareMatrix {
398 fn fill_into(self, square_matrix: &mut SquareMatrix) {
399 self.into_iter()
400 .zip(square_matrix.iter_mut())
401 .for_each(|(self_i, square_matrix_i)| {
402 self_i
403 .into_iter()
404 .zip(square_matrix_i.iter_mut())
405 .for_each(|(self_ij, square_matrix_ij)| *square_matrix_ij = self_ij)
406 });
407 }
408}
409
410impl Rank2 for SquareMatrix {
411 type Transpose = Self;
412 fn deviatoric(&self) -> Self {
413 let len = self.len();
414 let scale = -self.trace() / len as TensorRank0;
415 (0..len)
416 .map(|i| {
417 (0..len)
418 .map(|j| ((i == j) as u8) as TensorRank0 * scale)
419 .collect()
420 })
421 .collect::<Self>()
422 + self
423 }
424 fn deviatoric_and_trace(&self) -> (Self, TensorRank0) {
425 let len = self.len();
426 let trace = self.trace();
427 let scale = -trace / len as TensorRank0;
428 (
429 (0..len)
430 .map(|i| {
431 (0..len)
432 .map(|j| ((i == j) as u8) as TensorRank0 * scale)
433 .collect()
434 })
435 .collect::<Self>()
436 + self,
437 trace,
438 )
439 }
440 fn is_diagonal(&self) -> bool {
441 self.iter()
442 .enumerate()
443 .map(|(i, self_i)| {
444 self_i
445 .iter()
446 .enumerate()
447 .map(|(j, self_ij)| (self_ij == &0.0) as u8 * (i != j) as u8)
448 .sum::<u8>()
449 })
450 .sum::<u8>()
451 == (self.len().pow(2) - self.len()) as u8
452 }
453 fn is_identity(&self) -> bool {
454 self.iter().enumerate().all(|(i, self_i)| {
455 self_i
456 .iter()
457 .enumerate()
458 .all(|(j, self_ij)| self_ij == &((i == j) as u8 as TensorRank0))
459 })
460 }
461 fn is_symmetric(&self) -> bool {
462 self.iter().enumerate().all(|(i, self_i)| {
463 self_i
464 .iter()
465 .zip(self.iter())
466 .all(|(self_ij, self_j)| self_ij == &self_j[i])
467 })
468 }
469 fn squared_trace(&self) -> TensorRank0 {
470 self.iter()
471 .enumerate()
472 .map(|(i, self_i)| {
473 self_i
474 .iter()
475 .zip(self.iter())
476 .map(|(self_ij, self_j)| self_ij * self_j[i])
477 .sum::<TensorRank0>()
478 })
479 .sum()
480 }
481 fn trace(&self) -> TensorRank0 {
482 self.iter().enumerate().map(|(i, self_i)| self_i[i]).sum()
483 }
484 fn transpose(&self) -> Self::Transpose {
485 (0..self.len())
486 .map(|i| (0..self.len()).map(|j| self[j][i]).collect())
487 .collect()
488 }
489}
490
impl Tensor for SquareMatrix {
    type Item = Vector;
    /// Iterates over the rows by reference.
    fn iter(&self) -> impl Iterator<Item = &Self::Item> {
        self.0.iter()
    }
    /// Iterates over the rows by mutable reference.
    fn iter_mut(&mut self) -> impl Iterator<Item = &mut Self::Item> {
        self.0.iter_mut()
    }
}
500
impl IntoIterator for SquareMatrix {
    type Item = Vector;
    type IntoIter = IntoIter<Self::Item>;
    /// Consumes the matrix, yielding its rows by value.
    fn into_iter(self) -> Self::IntoIter {
        self.0.into_iter()
    }
}
508
impl TensorVec for SquareMatrix {
    type Item = Vector;
    type Slice<'a> = &'a [&'a [TensorRank0]];
    /// Moves all rows of `other` onto the end of `self`, leaving `other` empty.
    fn append(&mut self, other: &mut Self) {
        self.0.append(&mut other.0)
    }
    /// Returns whether the matrix has no rows.
    fn is_empty(&self) -> bool {
        self.0.is_empty()
    }
    /// Returns the number of rows.
    fn len(&self) -> usize {
        self.0.len()
    }
    /// Builds a matrix from a slice of row slices.
    fn new(slice: Self::Slice<'_>) -> Self {
        slice
            .iter()
            .map(|slice_entry| Self::Item::new(slice_entry))
            .collect()
    }
    /// Appends a row to the matrix.
    fn push(&mut self, item: Self::Item) {
        self.0.push(item)
    }
    /// Removes and returns the row at `index`, shifting later rows up.
    fn remove(&mut self, index: usize) -> Self::Item {
        self.0.remove(index)
    }
    /// Keeps only the rows for which the predicate returns `true`.
    fn retain<F>(&mut self, f: F)
    where
        F: FnMut(&Self::Item) -> bool,
    {
        self.0.retain(f)
    }
    /// Removes and returns the row at `index`, replacing it with the last row.
    fn swap_remove(&mut self, index: usize) -> Self::Item {
        self.0.swap_remove(index)
    }
    /// Creates a `len`-by-`len` matrix of zeros.
    fn zero(len: usize) -> Self {
        (0..len).map(|_| Self::Item::zero(len)).collect()
    }
}
546
547impl Div<TensorRank0> for SquareMatrix {
548 type Output = Self;
549 fn div(mut self, tensor_rank_0: TensorRank0) -> Self::Output {
550 self /= &tensor_rank_0;
551 self
552 }
553}
554
555impl Div<&TensorRank0> for SquareMatrix {
556 type Output = Self;
557 fn div(mut self, tensor_rank_0: &TensorRank0) -> Self::Output {
558 self /= tensor_rank_0;
559 self
560 }
561}
562
563impl DivAssign<TensorRank0> for SquareMatrix {
564 fn div_assign(&mut self, tensor_rank_0: TensorRank0) {
565 self.iter_mut().for_each(|entry| *entry /= &tensor_rank_0);
566 }
567}
568
569impl DivAssign<&TensorRank0> for SquareMatrix {
570 fn div_assign(&mut self, tensor_rank_0: &TensorRank0) {
571 self.iter_mut().for_each(|entry| *entry /= tensor_rank_0);
572 }
573}
574
575impl Mul<TensorRank0> for SquareMatrix {
576 type Output = Self;
577 fn mul(mut self, tensor_rank_0: TensorRank0) -> Self::Output {
578 self *= &tensor_rank_0;
579 self
580 }
581}
582
583impl Mul<&TensorRank0> for SquareMatrix {
584 type Output = Self;
585 fn mul(mut self, tensor_rank_0: &TensorRank0) -> Self::Output {
586 self *= tensor_rank_0;
587 self
588 }
589}
590
591impl Mul<&TensorRank0> for &SquareMatrix {
592 type Output = SquareMatrix;
593 fn mul(self, tensor_rank_0: &TensorRank0) -> Self::Output {
594 self.iter().map(|self_i| self_i * tensor_rank_0).collect()
595 }
596}
597
598impl MulAssign<TensorRank0> for SquareMatrix {
599 fn mul_assign(&mut self, tensor_rank_0: TensorRank0) {
600 self.iter_mut().for_each(|entry| *entry *= &tensor_rank_0);
601 }
602}
603
604impl MulAssign<&TensorRank0> for SquareMatrix {
605 fn mul_assign(&mut self, tensor_rank_0: &TensorRank0) {
606 self.iter_mut().for_each(|entry| *entry *= tensor_rank_0);
607 }
608}
609
610impl Mul<Vector> for SquareMatrix {
611 type Output = Vector;
612 fn mul(self, vector: Vector) -> Self::Output {
613 self.iter().map(|self_i| self_i * &vector).collect()
614 }
615}
616
617impl Mul<&Vector> for SquareMatrix {
618 type Output = Vector;
619 fn mul(self, vector: &Vector) -> Self::Output {
620 self.iter().map(|self_i| self_i * vector).collect()
621 }
622}
623
624impl Add for SquareMatrix {
625 type Output = Self;
626 fn add(mut self, vector: Self) -> Self::Output {
627 self += vector;
628 self
629 }
630}
631
632impl Add<&Self> for SquareMatrix {
633 type Output = Self;
634 fn add(mut self, vector: &Self) -> Self::Output {
635 self += vector;
636 self
637 }
638}
639
640impl AddAssign for SquareMatrix {
641 fn add_assign(&mut self, vector: Self) {
642 self.iter_mut()
643 .zip(vector.iter())
644 .for_each(|(self_entry, tensor_rank_1)| *self_entry += tensor_rank_1);
645 }
646}
647
648impl AddAssign<&Self> for SquareMatrix {
649 fn add_assign(&mut self, vector: &Self) {
650 self.iter_mut()
651 .zip(vector.iter())
652 .for_each(|(self_entry, tensor_rank_1)| *self_entry += tensor_rank_1);
653 }
654}
655
656impl Mul for SquareMatrix {
657 type Output = Self;
658 fn mul(self, matrix: Self) -> Self::Output {
659 let mut output = Self::zero(matrix.len());
660 self.iter()
661 .zip(output.iter_mut())
662 .for_each(|(self_i, output_i)| {
663 self_i
664 .iter()
665 .zip(matrix.iter())
666 .for_each(|(self_ij, matrix_j)| *output_i += matrix_j * self_ij)
667 });
668 output
669 }
670}
671
672impl Sub for SquareMatrix {
673 type Output = Self;
674 fn sub(mut self, square_matrix: Self) -> Self::Output {
675 self -= square_matrix;
676 self
677 }
678}
679
680impl Sub<&Self> for SquareMatrix {
681 type Output = Self;
682 fn sub(mut self, square_matrix: &Self) -> Self::Output {
683 self -= square_matrix;
684 self
685 }
686}
687
688impl Sub for &SquareMatrix {
689 type Output = SquareMatrix;
690 fn sub(self, square_matrix: Self) -> Self::Output {
691 square_matrix
692 .iter()
693 .zip(self.iter())
694 .map(|(square_matrix_i, self_i)| self_i - square_matrix_i)
695 .collect()
696 }
697}
698
699impl SubAssign for SquareMatrix {
700 fn sub_assign(&mut self, square_matrix: Self) {
701 self.iter_mut()
702 .zip(square_matrix.iter())
703 .for_each(|(self_entry, tensor_rank_1)| *self_entry -= tensor_rank_1);
704 }
705}
706
707impl SubAssign<&Self> for SquareMatrix {
708 fn sub_assign(&mut self, square_matrix: &Self) {
709 self.iter_mut()
710 .zip(square_matrix.iter())
711 .for_each(|(self_entry, tensor_rank_1)| *self_entry -= tensor_rank_1);
712 }
713}