diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 9e82437b..da6cb08f 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -118,13 +118,13 @@ jobs:

       - name: Rust-SPICE JPL DE validation
         run: |
-          RUST_BACKTRACE=1 RUST_LOG=debug cargo test validate_jplde_de440s --features validation --release -- --nocapture --ignored
-          RUST_BACKTRACE=1 RUST_LOG=debug cargo test validate_jplde_de440_full --features validation --release -- --nocapture --ignored
+          RUST_BACKTRACE=1 RUST_LOG=debug cargo test validate_jplde_de440s --features spkezr_validation --release -- --nocapture --ignored
+          RUST_BACKTRACE=1 RUST_LOG=debug cargo test validate_jplde_de440_full --features spkezr_validation --release -- --nocapture --ignored

       - name: Rust-SPICE hermite validation
         run: |
-          RUST_BACKTRACE=1 RUST_LOG=debug cargo test validate_hermite_type13_from_gmat --features validation --release -- --nocapture --ignored
-          RUST_BACKTRACE=1 RUST_LOG=debug cargo test validate_hermite_type13_with_varying_segment_sizes --features validation --release -- --nocapture --ignored
+          RUST_BACKTRACE=1 RUST_LOG=debug cargo test validate_hermite_type13_from_gmat --features spkezr_validation --release -- --nocapture --ignored
+          RUST_BACKTRACE=1 RUST_LOG=debug cargo test validate_hermite_type13_with_varying_segment_sizes --features spkezr_validation --release -- --nocapture --ignored

       # Now analyze the results and create pretty plots
       - uses: actions/setup-python@v4
diff --git a/Cargo.toml b/Cargo.toml
index 961aa865..cb6a2812 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -7,7 +7,7 @@ description = "ANISE provides a toolkit and files for Attitude, Navigation, Inst
 homepage = "https://github.com/anise-toolkit/"
 documentation = "https://docs.rs/anise/"
 repository = "https://github.com/anise-toolkit/anise.rs"
-keywords = ["attitude","navigation","instrument", "spacecraft", "ephemeris"]
+keywords = ["attitude", "navigation", "instrument", "spacecraft", "ephemeris"]
 categories = ["science", "simulation"]
 readme = "README.md"
 license = "MPL-2.0"
@@ -19,8 +19,8 @@ exclude = ["cspice"]
 hifitime = "3.8"
 memmap2 = "0.7.0"
 crc32fast = "1.3.0"
-der = {version = "0.7.8", features = ["derive", "alloc", "real"]}
-clap = {version = "3.1", features = ["derive"]}
+der = { version = "0.7.8", features = ["derive", "alloc", "real"] }
+clap = { version = "3.1", features = ["derive"] }
 thiserror = "1.0"
 log = "0.4"
 pretty_env_logger = "0.5"
@@ -28,11 +28,12 @@ tabled = "0.14"
 const_format = "0.2"
 nalgebra = "0.32"
 approx = "0.5.1"
-zerocopy = {version = "0.7.3", features = ["derive"]}
+zerocopy = { version = "0.7.3", features = ["derive"] }
 bytes = "1.4.0"
-snafu = "0.7.4"
+snafu = { version = "0.7.4", features = ["backtrace"] }
 lexical-core = "0.8.5"
 heapless = "0.7.16"
+rstest = "0.18.2"

 [dev-dependencies]
 rust-spice = "0.7.4"
@@ -40,12 +41,13 @@ parquet = "46.0.0"
 arrow = "46.0.0"
 criterion = "0.5"
 iai = "0.1"
-polars = {version = "0.33", features = ["lazy", "parquet"]}
+polars = { version = "0.33", features = ["lazy", "parquet"] }
 rayon = "1.7"

 [features]
-default = ["validation"]
-validation = [] # Enabling this flag significantly reduces compilation times due to Arrow and Polars.
+default = ["spkezr_validation"]
+# Enabling this flag significantly reduces compilation times due to Arrow and Polars.
+spkezr_validation = [] [profile.bench] debug = true diff --git a/benches/crit_jpl_ephemerides.rs b/benches/crit_jpl_ephemerides.rs index 35c972c6..3629a6c2 100644 --- a/benches/crit_jpl_ephemerides.rs +++ b/benches/crit_jpl_ephemerides.rs @@ -5,8 +5,6 @@ use anise::{ }; use criterion::{black_box, criterion_group, criterion_main, Criterion}; -use spice; - const NUM_QUERIES_PER_PAIR: f64 = 100.0; fn benchmark_spice_single_hop_type2_cheby(time_it: TimeSeries) { @@ -24,7 +22,7 @@ fn benchmark_spice_single_hop_type2_cheby(time_it: TimeSeries) { fn benchmark_anise_single_hop_type2_cheby(ctx: &Almanac, time_it: TimeSeries) { for epoch in time_it { black_box( - ctx.translate_from_to_km_s_geometric(EARTH_J2000, LUNA_J2000, epoch) + ctx.translate_from_to_geometric(EARTH_J2000, LUNA_J2000, epoch) .unwrap(), ); } @@ -40,7 +38,7 @@ pub fn criterion_benchmark(c: &mut Criterion) { let path = "./data/de440s.bsp"; let buf = file2heap!(path).unwrap(); let spk = SPK::parse(buf).unwrap(); - let ctx = Almanac::from_spk(&spk).unwrap(); + let ctx = Almanac::from_spk(spk).unwrap(); // Load SPICE data spice::furnsh("data/de440s.bsp"); diff --git a/benches/crit_spacecraft_ephemeris.rs b/benches/crit_spacecraft_ephemeris.rs index a1ea8f04..83fa0e31 100644 --- a/benches/crit_spacecraft_ephemeris.rs +++ b/benches/crit_spacecraft_ephemeris.rs @@ -2,8 +2,6 @@ use anise::{constants::frames::EARTH_J2000, file2heap, prelude::*}; use criterion::{black_box, criterion_group, criterion_main, Criterion}; -use spice; - const NUM_QUERIES: f64 = 100.0; fn benchmark_spice_single_hop_type13_hermite(time_it: TimeSeries) { @@ -27,7 +25,7 @@ fn benchmark_anise_single_hop_type13_hermite(ctx: &Almanac, time_it: TimeSeries) let my_sc_j2k = Frame::from_ephem_j2000(-10000001); for epoch in time_it { black_box( - ctx.translate_from_to_km_s_geometric(my_sc_j2k, EARTH_J2000, epoch) + ctx.translate_from_to_geometric(my_sc_j2k, EARTH_J2000, epoch) .unwrap(), ); } @@ -46,9 +44,9 @@ pub fn criterion_benchmark(c: &mut Criterion) { let buf = file2heap!("data/gmat-hermite.bsp").unwrap(); let spacecraft = SPK::parse(buf).unwrap(); - let ctx = Almanac::from_spk(&spk) + let ctx = Almanac::from_spk(spk) .unwrap() - .load_spk(&spacecraft) + .load_spk(spacecraft) .unwrap(); // Load SPICE data diff --git a/benches/iai_jpl_ephemerides.rs b/benches/iai_jpl_ephemerides.rs index 498dc396..f3a0defb 100644 --- a/benches/iai_jpl_ephemerides.rs +++ b/benches/iai_jpl_ephemerides.rs @@ -5,7 +5,6 @@ use anise::{ }; use iai::black_box; -use spice; const NUM_QUERIES_PER_PAIR: f64 = 100.0; @@ -40,11 +39,11 @@ fn benchmark_anise_single_hop_type2_cheby() { let path = "./data/de440s.bsp"; let buf = file2heap!(path).unwrap(); let spk = SPK::parse(buf).unwrap(); - let ctx = Almanac::from_spk(&spk).unwrap(); + let ctx = Almanac::from_spk(spk).unwrap(); for epoch in time_it { black_box( - ctx.translate_from_to_km_s_geometric(EARTH_J2000, LUNA_J2000, epoch) + ctx.translate_from_to_geometric(EARTH_J2000, LUNA_J2000, epoch) .unwrap(), ); } diff --git a/benches/iai_spacecraft_ephemeris.rs b/benches/iai_spacecraft_ephemeris.rs index fc98630e..c6f8bbe9 100644 --- a/benches/iai_spacecraft_ephemeris.rs +++ b/benches/iai_spacecraft_ephemeris.rs @@ -1,7 +1,6 @@ use anise::{constants::frames::EARTH_J2000, file2heap, prelude::*}; use iai::black_box; -use spice; fn benchmark_spice_single_hop_type13_hermite() { let epoch = Epoch::from_gregorian_hms(2000, 1, 1, 14, 0, 0, TimeScale::UTC); @@ -30,15 +29,15 @@ fn benchmark_anise_single_hop_type13_hermite() { let buf = 
file2heap!("data/gmat-hermite.bsp").unwrap(); let spacecraft = SPK::parse(buf).unwrap(); - let ctx = Almanac::from_spk(&spk) + let ctx = Almanac::from_spk(spk) .unwrap() - .load_spk(&spacecraft) + .load_spk(spacecraft) .unwrap(); let my_sc_j2k = Frame::from_ephem_j2000(-10000001); black_box( - ctx.translate_from_to_km_s_geometric(my_sc_j2k, EARTH_J2000, epoch) + ctx.translate_from_to_geometric(my_sc_j2k, EARTH_J2000, epoch) .unwrap(), ); } diff --git a/src/almanac/bpc.rs b/src/almanac/bpc.rs index 3e748593..e6d8b09f 100644 --- a/src/almanac/bpc.rs +++ b/src/almanac/bpc.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -10,16 +10,17 @@ use hifitime::Epoch; -use crate::errors::AniseError; +use crate::naif::daf::DAFError; use crate::naif::pck::BPCSummaryRecord; use crate::naif::BPC; +use crate::orientations::OrientationError; use log::error; use super::{Almanac, MAX_LOADED_BPCS}; impl<'a: 'b, 'b> Almanac<'a> { /// Loads a Binary Planetary Constants kernel. - pub fn load_bpc(&self, bpc: &'b BPC) -> Result, AniseError> { + pub fn load_bpc(&self, bpc: BPC) -> Result, OrientationError> { // This is just a bunch of pointers so it doesn't use much memory. let mut me = self.clone(); let mut data_idx = MAX_LOADED_BPCS; @@ -30,7 +31,9 @@ impl<'a: 'b, 'b> Almanac<'a> { } } if data_idx == MAX_LOADED_BPCS { - return Err(AniseError::StructureIsFull); + return Err(OrientationError::StructureIsFull { + max_slots: MAX_LOADED_BPCS, + }); } me.bpc_data[data_idx] = Some(bpc); Ok(me) @@ -38,7 +41,7 @@ impl<'a: 'b, 'b> Almanac<'a> { pub fn num_loaded_bpc(&self) -> usize { let mut count = 0; - for maybe in self.bpc_data { + for maybe in &self.bpc_data { if maybe.is_none() { break; } else { @@ -54,7 +57,7 @@ impl<'a: 'b, 'b> Almanac<'a> { &self, name: &str, epoch: Epoch, - ) -> Result<(&BPCSummaryRecord, usize, usize), AniseError> { + ) -> Result<(&BPCSummaryRecord, usize, usize), OrientationError> { for (no, maybe_bpc) in self .bpc_data .iter() @@ -62,15 +65,22 @@ impl<'a: 'b, 'b> Almanac<'a> { .rev() .enumerate() { - let bpc = maybe_bpc.unwrap(); + let bpc = maybe_bpc.as_ref().unwrap(); if let Ok((summary, idx_in_bpc)) = bpc.summary_from_name_at_epoch(name, epoch) { return Ok((summary, no, idx_in_bpc)); } } // If we're reached this point, there is no relevant summary at this epoch. 
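Reviewer note: the load_bpc signature above now takes the BPC by value and surfaces an OrientationError instead of the old AniseError. A minimal usage sketch of the new flow, assuming BPC is reachable from the prelude (otherwise anise::naif::BPC) and with a placeholder kernel path:

use anise::file2heap;
use anise::prelude::*;

fn load_high_precision_orientation() {
    // Placeholder path: any binary planetary constants kernel (DAF/PCK) works here.
    let buf = file2heap!("data/earth_latest_high_prec.bpc").unwrap();
    let bpc = BPC::parse(buf).unwrap();
    // load_bpc now consumes the BPC and returns a new Almanac, or an OrientationError
    // such as StructureIsFull { max_slots } when all BPC slots are taken.
    let almanac = Almanac::default().load_bpc(bpc).unwrap();
    assert_eq!(almanac.num_loaded_bpc(), 1);
}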
- error!("Context: No summary {name} valid at epoch {epoch}"); - Err(AniseError::MissingInterpolationData(epoch)) + error!("Almanac: No summary {name} valid at epoch {epoch}"); + Err(OrientationError::BPC { + action: "searching for BPC summary", + source: DAFError::SummaryNameAtEpochError { + kind: "BPC", + name: name.to_string(), + epoch, + }, + }) } /// Returns the summary given the name of the summary record if that summary has data defined at the requested epoch @@ -78,8 +88,7 @@ impl<'a: 'b, 'b> Almanac<'a> { &self, id: i32, epoch: Epoch, - ) -> Result<(&BPCSummaryRecord, usize, usize), AniseError> { - // TODO: Consider a return type here + ) -> Result<(&BPCSummaryRecord, usize, usize), OrientationError> { for (no, maybe_bpc) in self .bpc_data .iter() @@ -87,23 +96,30 @@ impl<'a: 'b, 'b> Almanac<'a> { .rev() .enumerate() { - let bpc = maybe_bpc.unwrap(); + let bpc = maybe_bpc.as_ref().unwrap(); if let Ok((summary, idx_in_bpc)) = bpc.summary_from_id_at_epoch(id, epoch) { // NOTE: We're iterating backward, so the correct BPC number is "total loaded" minus "current iteration". return Ok((summary, self.num_loaded_bpc() - no - 1, idx_in_bpc)); } } - error!("Context: No summary {id} valid at epoch {epoch}"); + error!("Almanac: No summary {id} valid at epoch {epoch}"); // If we're reached this point, there is no relevant summary at this epoch. - Err(AniseError::MissingInterpolationData(epoch)) + Err(OrientationError::BPC { + action: "searching for BPC summary", + source: DAFError::SummaryIdAtEpochError { + kind: "BPC", + id, + epoch, + }, + }) } /// Returns the summary given the name of the summary record. pub fn bpc_summary_from_name( &self, name: &str, - ) -> Result<(&BPCSummaryRecord, usize, usize), AniseError> { + ) -> Result<(&BPCSummaryRecord, usize, usize), OrientationError> { for (bpc_no, maybe_bpc) in self .bpc_data .iter() @@ -111,20 +127,28 @@ impl<'a: 'b, 'b> Almanac<'a> { .rev() .enumerate() { - let bpc = maybe_bpc.unwrap(); + let bpc = maybe_bpc.as_ref().unwrap(); if let Ok((summary, idx_in_bpc)) = bpc.summary_from_name(name) { return Ok((summary, bpc_no, idx_in_bpc)); } } // If we're reached this point, there is no relevant summary at this epoch. - error!("Context: No summary {name} valid"); - Err(AniseError::NoInterpolationData) + error!("Almanac: No summary {name} valid"); + Err(OrientationError::BPC { + action: "searching for BPC summary", + source: DAFError::SummaryNameError { + kind: "BPC", + name: name.to_string(), + }, + }) } /// Returns the summary given the name of the summary record if that summary has data defined at the requested epoch - pub fn bpc_summary(&self, id: i32) -> Result<(&BPCSummaryRecord, usize, usize), AniseError> { - // TODO: Consider a return type here + pub fn bpc_summary( + &self, + id: i32, + ) -> Result<(&BPCSummaryRecord, usize, usize), OrientationError> { for (no, maybe_bpc) in self .bpc_data .iter() @@ -132,15 +156,49 @@ impl<'a: 'b, 'b> Almanac<'a> { .rev() .enumerate() { - let bpc = maybe_bpc.unwrap(); + let bpc = maybe_bpc.as_ref().unwrap(); if let Ok((summary, idx_in_bpc)) = bpc.summary_from_id(id) { // NOTE: We're iterating backward, so the correct BPC number is "total loaded" minus "current iteration". 
return Ok((summary, self.num_loaded_bpc() - no - 1, idx_in_bpc)); } } - error!("Context: No summary {id} valid"); + error!("Almanac: No summary {id} valid"); // If we're reached this point, there is no relevant summary - Err(AniseError::NoInterpolationData) + Err(OrientationError::BPC { + action: "searching for BPC summary", + source: DAFError::SummaryIdError { kind: "BPC", id }, + }) + } +} + +#[cfg(test)] +mod ut_almanac_bpc { + use crate::prelude::{Almanac, Epoch}; + + #[test] + fn summaries_nothing_loaded() { + let almanac = Almanac::default(); + + let e = Epoch::now().unwrap(); + + assert!( + almanac.bpc_summary(0).is_err(), + "empty Almanac should report an error" + ); + assert!( + almanac.bpc_summary_at_epoch(0, e).is_err(), + "empty Almanac should report an error" + ); + assert!( + almanac.bpc_summary_from_name("invalid name").is_err(), + "empty Almanac should report an error" + ); + assert!( + almanac + .bpc_summary_from_name_at_epoch("invalid name", e) + .is_err(), + "empty Almanac should report an error" + ); } } diff --git a/src/almanac/mod.rs b/src/almanac/mod.rs index f2fba6f7..18503a2e 100644 --- a/src/almanac/mod.rs +++ b/src/almanac/mod.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -9,9 +9,7 @@ */ use crate::naif::{BPC, SPK}; -use crate::structure::dataset::DataSet; -use crate::structure::planetocentric::PlanetaryData; -use crate::structure::spacecraft::SpacecraftData; +use crate::structure::{PlanetaryDataSet, SpacecraftDataSet}; use core::fmt; // TODO: Switch these to build constants so that it's configurable when building the library. @@ -21,6 +19,7 @@ pub const MAX_SPACECRAFT_DATA: usize = 16; pub const MAX_PLANETARY_DATA: usize = 64; pub mod bpc; +pub mod planetary; pub mod spk; /// An Almanac contains all of the loaded SPICE and ANISE data. @@ -30,13 +29,13 @@ pub mod spk; #[derive(Clone, Default)] pub struct Almanac<'a> { /// NAIF SPK is kept unchanged - pub spk_data: [Option<&'a SPK>; MAX_LOADED_SPKS], + pub spk_data: [Option; MAX_LOADED_SPKS], /// NAIF BPC is kept unchanged - pub bpc_data: [Option<&'a BPC>; MAX_LOADED_BPCS], + pub bpc_data: [Option; MAX_LOADED_BPCS], /// Dataset of planetary data - pub planetary_data: DataSet<'a, PlanetaryData, MAX_PLANETARY_DATA>, + pub planetary_data: PlanetaryDataSet<'a>, /// Dataset of spacecraft data - pub spacecraft_data: DataSet<'a, SpacecraftData<'a>, MAX_SPACECRAFT_DATA>, + pub spacecraft_data: SpacecraftDataSet<'a>, } impl<'a> fmt::Display for Almanac<'a> { diff --git a/src/almanac/planetary.rs b/src/almanac/planetary.rs new file mode 100644 index 00000000..39f75953 --- /dev/null +++ b/src/almanac/planetary.rs @@ -0,0 +1,40 @@ +use crate::{ + prelude::{Frame, FrameUid}, + structure::dataset::DataSetError, +}; + +/* + * ANISE Toolkit + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
+ * + * Documentation: https://nyxspace.com/ + */ +use super::Almanac; +use snafu::prelude::*; + +#[derive(Debug, Snafu, PartialEq)] +#[snafu(visibility(pub(crate)))] +pub enum PlanetaryDataError { + #[snafu(display("when {action} {source}"))] + PlanetaryDataSet { + action: &'static str, + source: DataSetError, + }, +} + +impl<'a: 'b, 'b> Almanac<'a> { + /// Given the frame UID (or something that can be transformed into it), attempt to retrieve the full frame information, if that frame is loaded + pub fn frame_from_uid>(&self, uid: U) -> Result { + let uid = uid.into(); + Ok(self + .planetary_data + .get_by_id(uid.ephemeris_id) + .with_context(|_| PlanetaryDataSetSnafu { + action: "fetching frame by its UID via ephemeris_id", + })? + .to_frame(uid)) + } +} diff --git a/src/almanac/spk.rs b/src/almanac/spk.rs index 8cce6c1a..09e54e08 100644 --- a/src/almanac/spk.rs +++ b/src/almanac/spk.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -10,23 +10,23 @@ use hifitime::Epoch; -use crate::errors::AniseError; +use crate::ephemerides::EphemerisError; +use crate::naif::daf::DAFError; use crate::naif::spk::summary::SPKSummaryRecord; use crate::naif::SPK; use log::error; -// TODO: Switch these to build constants so that it's configurable when building the library. use super::{Almanac, MAX_LOADED_SPKS}; impl<'a: 'b, 'b> Almanac<'a> { - pub fn from_spk(spk: &'a SPK) -> Result, AniseError> { + pub fn from_spk(spk: SPK) -> Result, EphemerisError> { let me = Self::default(); me.load_spk(spk) } /// Loads a new SPK file into a new context. /// This new context is needed to satisfy the unloading of files. In fact, to unload a file, simply let the newly loaded context drop out of scope and Rust will clean it up. - pub fn load_spk(&self, spk: &'b SPK) -> Result, AniseError> { + pub fn load_spk(&self, spk: SPK) -> Result, EphemerisError> { // This is just a bunch of pointers so it doesn't use much memory. let mut me = self.clone(); // Parse as SPK and place into the SPK list if there is room @@ -38,7 +38,9 @@ impl<'a: 'b, 'b> Almanac<'a> { } } if data_idx == MAX_LOADED_SPKS { - return Err(AniseError::StructureIsFull); + return Err(EphemerisError::StructureIsFull { + max_slots: MAX_LOADED_SPKS, + }); } me.spk_data[data_idx] = Some(spk); Ok(me) @@ -46,7 +48,7 @@ impl<'a: 'b, 'b> Almanac<'a> { pub fn num_loaded_spk(&self) -> usize { let mut count = 0; - for maybe in self.spk_data { + for maybe in &self.spk_data { if maybe.is_none() { break; } else { @@ -62,7 +64,7 @@ impl<'a: 'b, 'b> Almanac<'a> { &self, name: &str, epoch: Epoch, - ) -> Result<(&SPKSummaryRecord, usize, usize), AniseError> { + ) -> Result<(&SPKSummaryRecord, usize, usize), EphemerisError> { for (spk_no, maybe_spk) in self .spk_data .iter() @@ -70,15 +72,22 @@ impl<'a: 'b, 'b> Almanac<'a> { .rev() .enumerate() { - let spk = maybe_spk.unwrap(); + let spk = maybe_spk.as_ref().unwrap(); if let Ok((summary, idx_in_spk)) = spk.summary_from_name_at_epoch(name, epoch) { return Ok((summary, spk_no, idx_in_spk)); } } // If we're reached this point, there is no relevant summary at this epoch. 
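Reviewer note: frame_from_uid above resolves a fully populated Frame from anything convertible into a FrameUid. A sketch of the intended call pattern, assuming planetary data has already been loaded into the Almanac and that the EARTH_J2000 constant converts into a FrameUid:

use anise::constants::frames::EARTH_J2000;
use anise::prelude::Almanac;

fn resolve_earth_frame(almanac: &Almanac) {
    match almanac.frame_from_uid(EARTH_J2000) {
        // The returned Frame carries GM, shape, etc., from the loaded planetary dataset.
        Ok(frame) => println!("resolved frame: {frame:?}"),
        Err(e) => eprintln!("planetary data not loaded: {e}"),
    }
}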
- error!("Context: No summary {name} valid at epoch {epoch}"); - Err(AniseError::MissingInterpolationData(epoch)) + error!("Almanac: No summary {name} valid at epoch {epoch}"); + Err(EphemerisError::SPK { + action: "searching for SPK summary", + source: DAFError::SummaryNameAtEpochError { + kind: "SPK", + name: name.to_string(), + epoch, + }, + }) } /// Returns the summary given the name of the summary record if that summary has data defined at the requested epoch @@ -86,8 +95,7 @@ impl<'a: 'b, 'b> Almanac<'a> { &self, id: i32, epoch: Epoch, - ) -> Result<(&SPKSummaryRecord, usize, usize), AniseError> { - // TODO: Consider a return type here + ) -> Result<(&SPKSummaryRecord, usize, usize), EphemerisError> { for (spk_no, maybe_spk) in self .spk_data .iter() @@ -95,23 +103,30 @@ impl<'a: 'b, 'b> Almanac<'a> { .rev() .enumerate() { - let spk = maybe_spk.unwrap(); + let spk = maybe_spk.as_ref().unwrap(); if let Ok((summary, idx_in_spk)) = spk.summary_from_id_at_epoch(id, epoch) { // NOTE: We're iterating backward, so the correct SPK number is "total loaded" minus "current iteration". return Ok((summary, self.num_loaded_spk() - spk_no - 1, idx_in_spk)); } } - error!("Context: No summary {id} valid at epoch {epoch}"); + error!("Almanac: No summary {id} valid at epoch {epoch}"); // If we're reached this point, there is no relevant summary at this epoch. - Err(AniseError::MissingInterpolationData(epoch)) + Err(EphemerisError::SPK { + action: "searching for SPK summary", + source: DAFError::SummaryIdAtEpochError { + kind: "SPK", + id, + epoch, + }, + }) } /// Returns the summary given the name of the summary record. pub fn spk_summary_from_name( &self, name: &str, - ) -> Result<(&SPKSummaryRecord, usize, usize), AniseError> { + ) -> Result<(&SPKSummaryRecord, usize, usize), EphemerisError> { for (spk_no, maybe_spk) in self .spk_data .iter() @@ -119,20 +134,29 @@ impl<'a: 'b, 'b> Almanac<'a> { .rev() .enumerate() { - let spk = maybe_spk.unwrap(); + let spk = maybe_spk.as_ref().unwrap(); if let Ok((summary, idx_in_spk)) = spk.summary_from_name(name) { return Ok((summary, spk_no, idx_in_spk)); } } // If we're reached this point, there is no relevant summary at this epoch. - error!("Context: No summary {name} valid"); - Err(AniseError::NoInterpolationData) + error!("Almanac: No summary {name} valid"); + + Err(EphemerisError::SPK { + action: "searching for SPK summary", + source: DAFError::SummaryNameError { + kind: "SPK", + name: name.to_string(), + }, + }) } /// Returns the summary given the name of the summary record if that summary has data defined at the requested epoch - pub fn spk_summary(&self, id: i32) -> Result<(&SPKSummaryRecord, usize, usize), AniseError> { - // TODO: Consider a return type here + pub fn spk_summary( + &self, + id: i32, + ) -> Result<(&SPKSummaryRecord, usize, usize), EphemerisError> { for (spk_no, maybe_spk) in self .spk_data .iter() @@ -140,15 +164,74 @@ impl<'a: 'b, 'b> Almanac<'a> { .rev() .enumerate() { - let spk = maybe_spk.unwrap(); + let spk = maybe_spk.as_ref().unwrap(); if let Ok((summary, idx_in_spk)) = spk.summary_from_id(id) { // NOTE: We're iterating backward, so the correct SPK number is "total loaded" minus "current iteration". 
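Reviewer note: together with the by-value from_spk/load_spk above and the rename from translate_from_to_km_s_geometric to translate_from_to_geometric in the benches, the end-to-end query now looks roughly like this (kernel path and epoch are illustrative, unwraps mirror the bench style):

use anise::constants::frames::{EARTH_J2000, LUNA_J2000};
use anise::file2heap;
use anise::prelude::*;

fn moon_wrt_earth_geometric() {
    let spk = SPK::parse(file2heap!("./data/de440s.bsp").unwrap()).unwrap();
    let almanac = Almanac::from_spk(spk).unwrap();
    let epoch = Epoch::from_gregorian_utc_at_midnight(2002, 2, 7);
    // Geometric (no light-time) translation of the Moon with respect to the Earth.
    let state = almanac
        .translate_from_to_geometric(LUNA_J2000, EARTH_J2000, epoch)
        .unwrap();
    assert_eq!(state.epoch, epoch);
}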
return Ok((summary, self.num_loaded_spk() - spk_no - 1, idx_in_spk)); } } - error!("Context: No summary {id} valid"); + error!("Almanac: No summary {id} valid"); // If we're reached this point, there is no relevant summary - Err(AniseError::NoInterpolationData) + Err(EphemerisError::SPK { + action: "searching for SPK summary", + source: DAFError::SummaryIdError { kind: "SPK", id }, + }) + } +} + +#[cfg(test)] +mod ut_almanac_spk { + use crate::{ + constants::frames::{EARTH_J2000, LUNA_J2000}, + prelude::{Almanac, Epoch}, + }; + + #[test] + fn summaries_nothing_loaded() { + let almanac = Almanac::default(); + let e = Epoch::now().unwrap(); + + assert!( + almanac.spk_summary(0).is_err(), + "empty Almanac should report an error" + ); + assert!( + almanac.spk_summary_at_epoch(0, e).is_err(), + "empty Almanac should report an error" + ); + assert!( + almanac.spk_summary_from_name("invalid name").is_err(), + "empty Almanac should report an error" + ); + assert!( + almanac + .spk_summary_from_name_at_epoch("invalid name", e) + .is_err(), + "empty Almanac should report an error" + ); + } + + #[test] + fn queries_nothing_loaded() { + let almanac = Almanac::default(); + let e = Epoch::now().unwrap(); + + assert!( + almanac.try_find_context_center().is_err(), + "empty Almanac should report an error" + ); + + assert!( + almanac.ephemeris_path_to_root(LUNA_J2000, e).is_err(), + "empty Almanac should report an error" + ); + + assert!( + almanac + .common_ephemeris_path(LUNA_J2000, EARTH_J2000, e) + .is_err(), + "empty Almanac should report an error" + ); } } diff --git a/src/astro/mod.rs b/src/astro/mod.rs index 0ef7a0be..67667bd9 100644 --- a/src/astro/mod.rs +++ b/src/astro/mod.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -8,6 +8,8 @@ * Documentation: https://nyxspace.com/ */ +use crate::errors::PhysicsError; + /// Defines the aberration corrections to the state of the target body to account for one-way light time and stellar aberration. /// **WARNING:** This enum is a placeholder until [https://github.com/anise-toolkit/anise.rs/issues/26] is implemented. #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -17,3 +19,5 @@ pub enum Aberration { pub mod orbit; pub mod orbit_geodetic; + +pub type PhysicsResult = Result; diff --git a/src/astro/orbit.rs b/src/astro/orbit.rs index 91ad6f9c..1501782c 100644 --- a/src/astro/orbit.rs +++ b/src/astro/orbit.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
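Reviewer note: the PhysicsResult<T> alias introduced in astro/mod.rs threads through every Keplerian accessor in the orbit.rs hunks below, so element access now propagates errors instead of silently producing NaNs. A sketch of the resulting call pattern, assuming Orbit stays re-exported from the prelude and that the state's frame carries a gravitational parameter:

use anise::astro::PhysicsResult;
use anise::prelude::Orbit;

fn report_elements(orbit: &Orbit) -> PhysicsResult<()> {
    // Each getter can now fail, e.g. if the frame has no GM or the radius is zero.
    println!(
        "sma = {:.3} km  ecc = {:.6}  inc = {:.3} deg",
        orbit.sma_km()?,
        orbit.ecc()?,
        orbit.inc_deg()?
    );
    Ok(())
}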
@@ -8,26 +8,33 @@ * Documentation: https://nyxspace.com/ */ +use super::PhysicsResult; use crate::{ - errors::PhysicsErrorKind, + errors::{ + HyperbolicTrueAnomalySnafu, InfiniteValueSnafu, ParabolicEccentricitySnafu, + ParabolicSemiParamSnafu, PhysicsError, RadiusSnafu, + }, math::{ angles::{between_0_360, between_pm_180}, - cartesian::Cartesian, + cartesian::CartesianState, Vector3, Vector6, }, - prelude::{CelestialFrame, CelestialFrameTrait}, + prelude::Frame, }; use core::f64::consts::PI; use core::f64::EPSILON; +use core::fmt; use hifitime::{Duration, Epoch, TimeUnits}; use log::{error, info, warn}; +use snafu::ensure; /// If an orbit has an eccentricity below the following value, it is considered circular (only affects warning messages) pub const ECC_EPSILON: f64 = 1e-11; -pub type Orbit = Cartesian; +/// A helper type alias, but no assumptions are made on the underlying validity of the frame. +pub type Orbit = CartesianState; -impl Cartesian { +impl CartesianState { /// Attempts to create a new Orbit around the provided Celestial or Geoid frame from the Keplerian orbital elements. /// /// **Units:** km, none, degrees, degrees, degrees, degrees @@ -45,13 +52,11 @@ impl Cartesian { aop: f64, ta: f64, epoch: Epoch, - frame: F, - ) -> Result { - if frame.mu_km3_s2().abs() < EPSILON { - warn!( - "GM is near zero ({}): expect math errors in Keplerian to Cartesian conversion", - frame.mu_km3_s2() - ); + frame: Frame, + ) -> PhysicsResult { + let mu_km3_s2 = frame.mu_km3_s2()?; + if mu_km3_s2.abs() < EPSILON { + warn!("GM is near zero ({mu_km3_s2} km^3/s^2): expect rounding errors!",); } // Algorithm from GMAT's StateConversionUtil::KeplerianToCartesian let ecc = if ecc < 0.0 { @@ -73,24 +78,24 @@ impl Cartesian { // GMAT errors below one meter. Let's warn for below that, but not panic, might be useful for landing scenarios? warn!("radius of periapsis is less than one meter"); } - if (1.0 - ecc).abs() < ECC_EPSILON { - error!("parabolic orbits have ill-defined Keplerian orbital elements"); - return Err(PhysicsErrorKind::ParabolicOrbit); - } + ensure!( + (1.0 - ecc).abs() >= ECC_EPSILON, + ParabolicEccentricitySnafu { limit: ECC_EPSILON } + ); if ecc > 1.0 { - let ta = between_0_360(ta); - if ta > (PI - (1.0 / ecc).acos()).to_degrees() { - error!( - "true anomaly value ({}) physically impossible for a hyperbolic orbit", - ta - ); - return Err(PhysicsErrorKind::InvalidHyperbolicTrueAnomaly(ta)); - } - } - if (1.0 + ecc * ta.to_radians().cos()).is_infinite() { - error!("radius of orbit is infinite"); - return Err(PhysicsErrorKind::InfiniteValue); + let ta_deg = between_0_360(ta); + ensure!( + ta_deg <= (PI - (1.0 / ecc).acos()).to_degrees(), + HyperbolicTrueAnomalySnafu { ta_deg } + ); } + ensure!( + (1.0 + ecc * ta.to_radians().cos()).is_finite(), + InfiniteValueSnafu { + action: "computing radius of orbit" + } + ); + // Done with all the warnings and errors supported by GMAT // The conversion algorithm itself comes from GMAT's StateConversionUtil::ComputeKeplToCart // NOTE: GMAT supports mean anomaly instead of true anomaly, but only for backward compatibility reasons @@ -101,10 +106,7 @@ impl Cartesian { let ta = ta.to_radians(); let p = sma * (1.0 - ecc.powi(2)); - if p.abs() < EPSILON { - error!("Semilatus rectum ~= 0.0: parabolic orbit"); - return Err(PhysicsErrorKind::ParabolicOrbit); - } + ensure!(p.abs() >= EPSILON, ParabolicSemiParamSnafu { p }); // NOTE: At this point GMAT computes 1+ecc**2 and checks whether it's very small. // It then reports that the radius may be too large. 
We've effectively already done @@ -117,7 +119,7 @@ impl Cartesian { let x = radius * (cos_aop_ta * cos_raan - cos_inc * sin_aop_ta * sin_raan); let y = radius * (cos_aop_ta * sin_raan + cos_inc * sin_aop_ta * cos_raan); let z = radius * sin_aop_ta * sin_inc; - let sqrt_gm_p = (frame.mu_km3_s2() / p).sqrt(); + let sqrt_gm_p = (mu_km3_s2 / p).sqrt(); let cos_ta_ecc = ta.cos() + ecc; let sin_ta = ta.sin(); @@ -130,7 +132,6 @@ impl Cartesian { Ok(Self { radius_km: Vector3::new(x, y, z), velocity_km_s: Vector3::new(vx, vy, vz), - acceleration_km_s2: None, epoch, frame, }) @@ -145,23 +146,32 @@ impl Cartesian { raan: f64, aop: f64, ta: f64, - dt: Epoch, - frame: F, - ) -> Result { + epoch: Epoch, + frame: Frame, + ) -> PhysicsResult { + ensure!( + r_a > EPSILON, + RadiusSnafu { + action: "radius of apoapsis is negative" + } + ); + ensure!( + r_p > EPSILON, + RadiusSnafu { + action: "radius of periapsis is negative" + } + ); + // The two checks above ensure that sma > 0 let sma = (r_a + r_p) / 2.0; let ecc = r_a / sma - 1.0; - Self::try_keplerian(sma, ecc, inc, raan, aop, ta, dt, frame) + Self::try_keplerian(sma, ecc, inc, raan, aop, ta, epoch, frame) } /// Attempts to create a new Orbit around the provided frame from the borrowed state vector /// /// The state vector **must** be sma, ecc, inc, raan, aop, ta. This function is a shortcut to `cartesian` /// and as such it has the same unit requirements. - pub fn try_keplerian_vec( - state: &Vector6, - epoch: Epoch, - frame: F, - ) -> Result { + pub fn try_keplerian_vec(state: &Vector6, epoch: Epoch, frame: Frame) -> PhysicsResult { Self::try_keplerian( state[0], state[1], state[2], state[3], state[4], state[5], epoch, frame, ) @@ -184,7 +194,7 @@ impl Cartesian { aop: f64, ta: f64, epoch: Epoch, - frame: F, + frame: Frame, ) -> Self { Self::try_keplerian(sma, ecc, inc, raan, aop, ta, epoch, frame).unwrap() } @@ -198,279 +208,317 @@ impl Cartesian { raan: f64, aop: f64, ta: f64, - dt: Epoch, - frame: F, + epoch: Epoch, + frame: Frame, ) -> Self { - Self::try_keplerian_apsis_radii(r_a, r_p, inc, raan, aop, ta, dt, frame).unwrap() + Self::try_keplerian_apsis_radii(r_a, r_p, inc, raan, aop, ta, epoch, frame).unwrap() } /// Creates a new Orbit around the provided frame from the borrowed state vector /// /// The state vector **must** be sma, ecc, inc, raan, aop, ta. This function is a shortcut to `cartesian` /// and as such it has the same unit requirements. - pub fn keplerian_vec(state: &Vector6, epoch: Epoch, frame: F) -> Self { + pub fn keplerian_vec(state: &Vector6, epoch: Epoch, frame: Frame) -> Self { Self::try_keplerian_vec(state, epoch, frame).unwrap() } /// Returns this state as a Keplerian Vector6 in [km, none, degrees, degrees, degrees, degrees] /// /// Note that the time is **not** returned in the vector. 
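Reviewer note: try_keplerian_apsis_radii above now rejects non-positive apsis radii up front rather than letting the Keplerian-to-Cartesian conversion produce an invalid state. A small sketch with illustrative values (km and degrees), assuming the frame passed in has its GM defined:

use anise::astro::PhysicsResult;
use anise::prelude::*;

fn iss_like_orbit(epoch: Epoch, eme2k: Frame) -> PhysicsResult<Orbit> {
    // Arguments are r_a, r_p in km, then inc/raan/aop/ta in degrees, as in try_keplerian.
    Orbit::try_keplerian_apsis_radii(6796.0, 6782.0, 51.6, 0.0, 0.0, 0.0, epoch, eme2k)
}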
- pub fn to_keplerian_vec(self) -> Vector6 { - Vector6::new( - self.sma_km(), - self.ecc(), - self.inc_deg(), - self.raan_deg(), - self.aop_deg(), - self.ta_deg(), - ) + pub fn to_keplerian_vec(self) -> PhysicsResult { + Ok(Vector6::new( + self.sma_km()?, + self.ecc()?, + self.inc_deg()?, + self.raan_deg()?, + self.aop_deg()?, + self.ta_deg()?, + )) } /// Returns the orbital momentum vector - pub fn hvec(&self) -> Vector3 { - self.radius_km.cross(&self.velocity_km_s) + pub fn hvec(&self) -> PhysicsResult { + ensure!( + self.rmag_km() > EPSILON, + RadiusSnafu { + action: "cannot compute orbital momentum vector with zero radius" + } + ); + ensure!( + self.vmag_km_s() > EPSILON, + RadiusSnafu { + action: "cannot compute orbital momentum vector with zero velocity" + } + ); + Ok(self.radius_km.cross(&self.velocity_km_s)) } /// Returns the orbital momentum value on the X axis - pub fn hx(&self) -> f64 { - self.hvec()[0] + pub fn hx(&self) -> PhysicsResult { + Ok(self.hvec()?[0]) } /// Returns the orbital momentum value on the Y axis - pub fn hy(&self) -> f64 { - self.hvec()[1] + pub fn hy(&self) -> PhysicsResult { + Ok(self.hvec()?[1]) } /// Returns the orbital momentum value on the Z axis - pub fn hz(&self) -> f64 { - self.hvec()[2] + pub fn hz(&self) -> PhysicsResult { + Ok(self.hvec()?[2]) } /// Returns the norm of the orbital momentum - pub fn hmag(&self) -> f64 { - self.hvec().norm() + pub fn hmag(&self) -> PhysicsResult { + Ok(self.hvec()?.norm()) } /// Returns the specific mechanical energy in km^2/s^2 - pub fn energy_km2_s2(&self) -> f64 { - self.vmag_km_s().powi(2) / 2.0 - self.frame.mu_km3_s2() / self.rmag_km() + pub fn energy_km2_s2(&self) -> PhysicsResult { + ensure!( + self.rmag_km() > EPSILON, + RadiusSnafu { + action: "cannot compute energy with zero radial state" + } + ); + Ok(self.vmag_km_s().powi(2) / 2.0 - self.frame.mu_km3_s2()? / self.rmag_km()) } /// Returns the semi-major axis in km - pub fn sma_km(&self) -> f64 { - -self.frame.mu_km3_s2() / (2.0 * self.energy_km2_s2()) + pub fn sma_km(&self) -> PhysicsResult { + // Division by zero prevented in energy_km2_s2 + Ok(-self.frame.mu_km3_s2()? / (2.0 * self.energy_km2_s2()?)) } /// Mutates this orbit to change the SMA - pub fn set_sma(&mut self, new_sma_km: f64) { + pub fn set_sma(&mut self, new_sma_km: f64) -> PhysicsResult<()> { let me = Self::keplerian( new_sma_km, - self.ecc(), - self.inc_deg(), - self.raan_deg(), - self.aop_deg(), - self.ta_deg(), + self.ecc()?, + self.inc_deg()?, + self.raan_deg()?, + self.aop_deg()?, + self.ta_deg()?, self.epoch, self.frame, ); *self = me; + + Ok(()) } /// Returns a copy of the state with a new SMA - pub fn with_sma(self, new_sma_km: f64) -> Self { + pub fn with_sma(self, new_sma_km: f64) -> PhysicsResult { let mut me = self; - me.set_sma(new_sma_km); - me + me.set_sma(new_sma_km)?; + Ok(me) } /// Returns a copy of the state with a provided SMA added to the current one - pub fn add_sma(self, delta_sma: f64) -> Self { + pub fn add_sma(self, delta_sma: f64) -> PhysicsResult { let mut me = self; - me.set_sma(me.sma_km() + delta_sma); - me + me.set_sma(me.sma_km()? + delta_sma)?; + Ok(me) } /// Returns the period in seconds - pub fn period(&self) -> Duration { - 2.0 * PI - * (self.sma_km().powi(3) / self.frame.mu_km3_s2()) + pub fn period(&self) -> PhysicsResult { + Ok(2.0 + * PI + * (self.sma_km()?.powi(3) / self.frame.mu_km3_s2()?) 
.sqrt() - .seconds() + .seconds()) } /// Returns the eccentricity vector (no unit) - pub fn evec(&self) -> Vector3 { + pub fn evec(&self) -> Result { let r = self.radius_km; + ensure!( + self.rmag_km() > EPSILON, + RadiusSnafu { + action: "cannot compute eccentricity vector with zero radial state" + } + ); let v = self.velocity_km_s; - ((v.norm().powi(2) - self.frame.mu_km3_s2() / r.norm()) * r - (r.dot(&v)) * v) - / self.frame.mu_km3_s2() + Ok( + ((v.norm().powi(2) - self.frame.mu_km3_s2()? / r.norm()) * r - (r.dot(&v)) * v) + / self.frame.mu_km3_s2()?, + ) } /// Returns the eccentricity (no unit) - pub fn ecc(&self) -> f64 { - self.evec().norm() + pub fn ecc(&self) -> PhysicsResult { + Ok(self.evec()?.norm()) } /// Mutates this orbit to change the ECC - pub fn set_ecc(&mut self, new_ecc: f64) { + pub fn set_ecc(&mut self, new_ecc: f64) -> PhysicsResult<()> { let me = Self::keplerian( - self.sma_km(), + self.sma_km()?, new_ecc, - self.inc_deg(), - self.raan_deg(), - self.aop_deg(), - self.ta_deg(), + self.inc_deg()?, + self.raan_deg()?, + self.aop_deg()?, + self.ta_deg()?, self.epoch, self.frame, ); *self = me; + + Ok(()) } /// Returns a copy of the state with a new ECC - pub fn with_ecc(self, new_ecc: f64) -> Self { + pub fn with_ecc(self, new_ecc: f64) -> PhysicsResult { let mut me = self; - me.set_ecc(new_ecc); - me + me.set_ecc(new_ecc)?; + Ok(me) } /// Returns a copy of the state with a provided ECC added to the current one - pub fn add_ecc(self, delta_ecc: f64) -> Self { + pub fn add_ecc(self, delta_ecc: f64) -> PhysicsResult { let mut me = self; - me.set_ecc(me.ecc() + delta_ecc); - me + me.set_ecc(me.ecc()? + delta_ecc)?; + Ok(me) } /// Returns the inclination in degrees - pub fn inc_deg(&self) -> f64 { - (self.hvec()[2] / self.hmag()).acos().to_degrees() + pub fn inc_deg(&self) -> PhysicsResult { + Ok((self.hvec()?[2] / self.hmag()?).acos().to_degrees()) } /// Mutates this orbit to change the INC - pub fn set_inc_deg(&mut self, new_inc_deg: f64) { + pub fn set_inc_deg(&mut self, new_inc_deg: f64) -> PhysicsResult<()> { let me = Self::keplerian( - self.sma_km(), - self.ecc(), + self.sma_km()?, + self.ecc()?, new_inc_deg, - self.raan_deg(), - self.aop_deg(), - self.ta_deg(), + self.raan_deg()?, + self.aop_deg()?, + self.ta_deg()?, self.epoch, self.frame, ); *self = me; + + Ok(()) } /// Returns a copy of the state with a new INC - pub fn with_inc_deg(self, new_inc_deg: f64) -> Self { + pub fn with_inc_deg(self, new_inc_deg: f64) -> PhysicsResult { let mut me = self; - me.set_inc_deg(new_inc_deg); - me + me.set_inc_deg(new_inc_deg)?; + Ok(me) } /// Returns a copy of the state with a provided INC added to the current one - pub fn add_inc_deg(self, delta_inc_deg: f64) -> Self { + pub fn add_inc_deg(self, delta_inc_deg: f64) -> PhysicsResult { let mut me = self; - me.set_inc_deg(me.inc_deg() + delta_inc_deg); - me + me.set_inc_deg(me.inc_deg()? + delta_inc_deg)?; + Ok(me) } /// Returns the argument of periapsis in degrees - pub fn aop_deg(&self) -> f64 { - let n = Vector3::new(0.0, 0.0, 1.0).cross(&self.hvec()); - let cos_aop = n.dot(&self.evec()) / (n.norm() * self.ecc()); + pub fn aop_deg(&self) -> PhysicsResult { + let n = Vector3::new(0.0, 0.0, 1.0).cross(&self.hvec()?); + let cos_aop = n.dot(&self.evec()?) 
/ (n.norm() * self.ecc()?); let aop = cos_aop.acos(); if aop.is_nan() { if cos_aop > 1.0 { - 180.0 + Ok(180.0) } else { - 0.0 + Ok(0.0) } - } else if self.evec()[2] < 0.0 { - (2.0 * PI - aop).to_degrees() + } else if self.evec()?[2] < 0.0 { + Ok((2.0 * PI - aop).to_degrees()) } else { - aop.to_degrees() + Ok(aop.to_degrees()) } } /// Mutates this orbit to change the AOP - pub fn set_aop_deg(&mut self, new_aop_deg: f64) { + pub fn set_aop_deg(&mut self, new_aop_deg: f64) -> PhysicsResult<()> { let me = Self::keplerian( - self.sma_km(), - self.ecc(), - self.inc_deg(), - self.raan_deg(), + self.sma_km()?, + self.ecc()?, + self.inc_deg()?, + self.raan_deg()?, new_aop_deg, - self.ta_deg(), + self.ta_deg()?, self.epoch, self.frame, ); *self = me; + + Ok(()) } /// Returns a copy of the state with a new AOP - pub fn with_aop_deg(self, new_aop_deg: f64) -> Self { + pub fn with_aop_deg(self, new_aop_deg: f64) -> PhysicsResult { let mut me = self; - me.set_aop_deg(new_aop_deg); - me + me.set_aop_deg(new_aop_deg)?; + Ok(me) } /// Returns a copy of the state with a provided AOP added to the current one - pub fn add_aop_deg(self, delta_aop_deg: f64) -> Self { + pub fn add_aop_deg(self, delta_aop_deg: f64) -> PhysicsResult { let mut me = self; - me.set_aop_deg(me.aop_deg() + delta_aop_deg); - me + me.set_aop_deg(me.aop_deg()? + delta_aop_deg)?; + Ok(me) } /// Returns the right ascension of ther ascending node in degrees - pub fn raan_deg(&self) -> f64 { - let n = Vector3::new(0.0, 0.0, 1.0).cross(&self.hvec()); + pub fn raan_deg(&self) -> PhysicsResult { + let n = Vector3::new(0.0, 0.0, 1.0).cross(&self.hvec()?); let cos_raan = n[0] / n.norm(); let raan = cos_raan.acos(); if raan.is_nan() { if cos_raan > 1.0 { - 180.0 + Ok(180.0) } else { - 0.0 + Ok(0.0) } } else if n[1] < 0.0 { - (2.0 * PI - raan).to_degrees() + Ok((2.0 * PI - raan).to_degrees()) } else { - raan.to_degrees() + Ok(raan.to_degrees()) } } /// Mutates this orbit to change the RAAN - pub fn set_raan_deg(&mut self, new_raan_deg: f64) { + pub fn set_raan_deg(&mut self, new_raan_deg: f64) -> PhysicsResult<()> { let me = Self::keplerian( - self.sma_km(), - self.ecc(), - self.inc_deg(), + self.sma_km()?, + self.ecc()?, + self.inc_deg()?, new_raan_deg, - self.aop_deg(), - self.ta_deg(), + self.aop_deg()?, + self.ta_deg()?, self.epoch, self.frame, ); *self = me; + + Ok(()) } /// Returns a copy of the state with a new RAAN - pub fn with_raan_deg(self, new_raan_deg: f64) -> Self { + pub fn with_raan_deg(self, new_raan_deg: f64) -> PhysicsResult { let mut me = self; - me.set_raan_deg(new_raan_deg); - me + me.set_raan_deg(new_raan_deg)?; + Ok(me) } /// Returns a copy of the state with a provided RAAN added to the current one - pub fn add_raan_deg(self, delta_raan_deg: f64) -> Self { + pub fn add_raan_deg(self, delta_raan_deg: f64) -> PhysicsResult { let mut me = self; - me.set_raan_deg(me.raan_deg() + delta_raan_deg); - me + me.set_raan_deg(me.raan_deg()? + delta_raan_deg)?; + Ok(me) } /// Returns the true anomaly in degrees between 0 and 360.0 @@ -481,72 +529,70 @@ impl Cartesian { /// LIMITATION: For an orbit whose true anomaly is (very nearly) 0.0 or 180.0, this function may return either 0.0 or 180.0 with a very small time increment. /// This is due to the precision of the cosine calculation: if the arccosine calculation is out of bounds, the sign of the cosine of the true anomaly is used /// to determine whether the true anomaly should be 0.0 or 180.0. 
**In other words**, there is an ambiguity in the computation in the true anomaly exactly at 180.0 and 0.0. - pub fn ta_deg(&self) -> f64 { - if self.ecc() < ECC_EPSILON { + pub fn ta_deg(&self) -> PhysicsResult { + if self.ecc()? < ECC_EPSILON { warn!( "true anomaly ill-defined for circular orbit (e = {})", - self.ecc() + self.ecc()? ); } - let cos_nu = self.evec().dot(&self.radius_km) / (self.ecc() * self.rmag_km()); + let cos_nu = self.evec()?.dot(&self.radius_km) / (self.ecc()? * self.rmag_km()); // If we're close the valid bounds, let's just do a sign check and return the true anomaly let ta = cos_nu.acos(); if ta.is_nan() { if cos_nu > 1.0 { - 180.0 + Ok(180.0) } else { - 0.0 + Ok(0.0) } } else if self.radius_km.dot(&self.velocity_km_s) < 0.0 { - (2.0 * PI - ta).to_degrees() + Ok((2.0 * PI - ta).to_degrees()) } else { - ta.to_degrees() + Ok(ta.to_degrees()) } } /// Mutates this orbit to change the TA - pub fn set_ta_deg(&mut self, new_ta_deg: f64) { + pub fn set_ta_deg(&mut self, new_ta_deg: f64) -> PhysicsResult<()> { let me = Self::keplerian( - self.sma_km(), - self.ecc(), - self.inc_deg(), - self.raan_deg(), - self.aop_deg(), + self.sma_km()?, + self.ecc()?, + self.inc_deg()?, + self.raan_deg()?, + self.aop_deg()?, new_ta_deg, self.epoch, self.frame, ); *self = me; + + Ok(()) } /// Returns a copy of the state with a new TA - pub fn with_ta_deg(self, new_ta_deg: f64) -> Self { + pub fn with_ta_deg(self, new_ta_deg: f64) -> PhysicsResult { let mut me = self; - me.set_ta_deg(new_ta_deg); - me + me.set_ta_deg(new_ta_deg)?; + Ok(me) } /// Returns a copy of the state with a provided TA added to the current one - pub fn add_ta_deg(self, delta_ta_deg: f64) -> Self { + pub fn add_ta_deg(self, delta_ta_deg: f64) -> PhysicsResult { let mut me = self; - me.set_ta_deg(me.ta_deg() + delta_ta_deg); - me + me.set_ta_deg(me.ta_deg()? + delta_ta_deg)?; + Ok(me) } /// Returns a copy of this state with the provided apoasis and periapsis - pub fn with_apoapsis_periapsis_km( - self, - new_ra_km: f64, - new_rp_km: f64, - ) -> Result { + pub fn with_apoapsis_periapsis_km(self, new_ra_km: f64, new_rp_km: f64) -> PhysicsResult { Self::try_keplerian_apsis_radii( new_ra_km, new_rp_km, - self.inc_deg(), - self.raan_deg(), - self.aop_deg(), - self.ta_deg(), + self.inc_deg()?, + self.raan_deg()?, + self.aop_deg()?, + self.ta_deg()?, self.epoch, self.frame, ) @@ -557,94 +603,96 @@ impl Cartesian { self, delta_ra_km: f64, delta_rp_km: f64, - ) -> Result { + ) -> PhysicsResult { Self::try_keplerian_apsis_radii( - self.apoapsis_km() + delta_ra_km, - self.periapsis_km() + delta_rp_km, - self.inc_deg(), - self.raan_deg(), - self.aop_deg(), - self.ta_deg(), + self.apoapsis_km()? + delta_ra_km, + self.periapsis_km()? + delta_rp_km, + self.inc_deg()?, + self.raan_deg()?, + self.aop_deg()?, + self.ta_deg()?, self.epoch, self.frame, ) } /// Returns the true longitude in degrees - pub fn tlong_deg(&self) -> f64 { + pub fn tlong_deg(&self) -> PhysicsResult { // Angles already in degrees - between_0_360(self.aop_deg() + self.raan_deg() + self.ta_deg()) + Ok(between_0_360( + self.aop_deg()? + self.raan_deg()? + self.ta_deg()?, + )) } /// Returns the argument of latitude in degrees /// /// NOTE: If the orbit is near circular, the AoL will be computed from the true longitude /// instead of relying on the ill-defined true anomaly. - pub fn aol_deg(&self) -> f64 { - between_0_360(if self.ecc() < ECC_EPSILON { - self.tlong_deg() - self.raan_deg() + pub fn aol_deg(&self) -> PhysicsResult { + Ok(between_0_360(if self.ecc()? 
< ECC_EPSILON { + self.tlong_deg()? - self.raan_deg()? } else { - self.aop_deg() + self.ta_deg() - }) + self.aop_deg()? + self.ta_deg()? + })) } /// Returns the radius of periapsis (or perigee around Earth), in kilometers. - pub fn periapsis_km(&self) -> f64 { - self.sma_km() * (1.0 - self.ecc()) + pub fn periapsis_km(&self) -> PhysicsResult { + Ok(self.sma_km()? * (1.0 - self.ecc()?)) } /// Returns the radius of apoapsis (or apogee around Earth), in kilometers. - pub fn apoapsis_km(&self) -> f64 { - self.sma_km() * (1.0 + self.ecc()) + pub fn apoapsis_km(&self) -> PhysicsResult { + Ok(self.sma_km()? * (1.0 + self.ecc()?)) } /// Returns the eccentric anomaly in degrees /// /// This is a conversion from GMAT's StateConversionUtil::TrueToEccentricAnomaly - pub fn ea_deg(&self) -> f64 { - let (sin_ta, cos_ta) = self.ta_deg().to_radians().sin_cos(); - let ecc_cos_ta = self.ecc() * cos_ta; - let sin_ea = ((1.0 - self.ecc().powi(2)).sqrt() * sin_ta) / (1.0 + ecc_cos_ta); - let cos_ea = (self.ecc() + cos_ta) / (1.0 + ecc_cos_ta); + pub fn ea_deg(&self) -> PhysicsResult { + let (sin_ta, cos_ta) = self.ta_deg()?.to_radians().sin_cos(); + let ecc_cos_ta = self.ecc()? * cos_ta; + let sin_ea = ((1.0 - self.ecc()?.powi(2)).sqrt() * sin_ta) / (1.0 + ecc_cos_ta); + let cos_ea = (self.ecc()? + cos_ta) / (1.0 + ecc_cos_ta); // The atan2 function is a bit confusing: https://doc.rust-lang.org/std/primitive.f64.html#method.atan2 . - sin_ea.atan2(cos_ea).to_degrees() + Ok(sin_ea.atan2(cos_ea).to_degrees()) } /// Returns the flight path angle in degrees - pub fn fpa_deg(&self) -> f64 { - let nu = self.ta_deg().to_radians(); - let ecc = self.ecc(); + pub fn fpa_deg(&self) -> PhysicsResult { + let nu = self.ta_deg()?.to_radians(); + let ecc = self.ecc()?; let denom = (1.0 + 2.0 * ecc * nu.cos() + ecc.powi(2)).sqrt(); let sin_fpa = ecc * nu.sin() / denom; let cos_fpa = 1.0 + ecc * nu.cos() / denom; - sin_fpa.atan2(cos_fpa).to_degrees() + Ok(sin_fpa.atan2(cos_fpa).to_degrees()) } /// Returns the mean anomaly in degrees /// /// This is a conversion from GMAT's StateConversionUtil::TrueToMeanAnomaly - pub fn ma_deg(&self) -> f64 { - if self.ecc().abs() < ECC_EPSILON { - error!("parabolic orbit: setting mean anomaly to 0.0"); - 0.0 - } else if self.ecc() < 1.0 { - between_0_360( - (self.ea_deg().to_radians() - self.ecc() * self.ea_deg().to_radians().sin()) + pub fn ma_deg(&self) -> PhysicsResult { + if self.ecc()?.abs() < ECC_EPSILON { + Err(PhysicsError::ParabolicEccentricity { limit: ECC_EPSILON }) + } else if self.ecc()? < 1.0 { + Ok(between_0_360( + (self.ea_deg()?.to_radians() - self.ecc()? * self.ea_deg()?.to_radians().sin()) .to_degrees(), - ) + )) } else { - info!("computing the hyperbolic anomaly"); // From GMAT's TrueToHyperbolicAnomaly - ((self.ta_deg().to_radians().sin() * (self.ecc().powi(2) - 1.0)).sqrt() - / (1.0 + self.ecc() * self.ta_deg().to_radians().cos())) - .asinh() - .to_degrees() + Ok( + ((self.ta_deg()?.to_radians().sin() * (self.ecc()?.powi(2) - 1.0)).sqrt() + / (1.0 + self.ecc()? * self.ta_deg()?.to_radians().cos())) + .asinh() + .to_degrees(), + ) } } /// Returns the semi parameter (or semilatus rectum) - pub fn semi_parameter_km(&self) -> f64 { - self.sma_km() * (1.0 - self.ecc().powi(2)) + pub fn semi_parameter_km(&self) -> PhysicsResult { + Ok(self.sma_km()? * (1.0 - self.ecc()?.powi(2))) } /// Returns whether this state satisfies the requirement to compute the Mean Brouwer Short orbital @@ -654,19 +702,19 @@ impl Cartesian { /// The details are at the log level `info`. 
/// NOTE: Mean Brouwer Short are only defined around Earth. However, `nyx` does *not* check the /// main celestial body around which the state is defined (GMAT does perform this verification). - pub fn is_brouwer_short_valid(&self) -> bool { - if self.inc_deg() > 180.0 { + pub fn is_brouwer_short_valid(&self) -> PhysicsResult { + if self.inc_deg()? > 180.0 { info!("Brouwer Mean Short only applicable for inclinations less than 180.0"); - false - } else if self.ecc() >= 1.0 || self.ecc() < 0.0 { + Ok(false) + } else if self.ecc()? >= 1.0 || self.ecc()? < 0.0 { info!("Brouwer Mean Short only applicable for elliptical orbits"); - false - } else if self.periapsis_km() < 3000.0 { + Ok(false) + } else if self.periapsis_km()? < 3000.0 { // NOTE: GMAT emits a warning if the periagee is less than the Earth radius, but we do not do that here. info!("Brouwer Mean Short only applicable for if perigee is greater than 3000 km"); - false + Ok(false) } else { - true + Ok(true) } } @@ -681,11 +729,12 @@ impl Cartesian { } /// Returns the semi minor axis in km, includes code for a hyperbolic orbit - pub fn semi_minor_axis_km(&self) -> f64 { - if self.ecc() <= 1.0 { - ((self.sma_km() * self.ecc()).powi(2) - self.sma_km().powi(2)).sqrt() + pub fn semi_minor_axis_km(&self) -> PhysicsResult { + if self.ecc()? <= 1.0 { + Ok(((self.sma_km()? * self.ecc()?).powi(2) - self.sma_km()?.powi(2)).sqrt()) } else { - self.hmag().powi(2) / (self.frame.mu_km3_s2() * (self.ecc().powi(2) - 1.0).sqrt()) + Ok(self.hmag()?.powi(2) + / (self.frame.mu_km3_s2()? * (self.ecc()?.powi(2) - 1.0).sqrt())) } } @@ -699,7 +748,50 @@ impl Cartesian { } /// Returns the $C_3$ of this orbit in km^2/s^2 - pub fn c3_km2_s2(&self) -> f64 { - -self.frame.mu_km3_s2() / self.sma_km() + pub fn c3_km2_s2(&self) -> PhysicsResult { + Ok(-self.frame.mu_km3_s2()? / self.sma_km()?) + } +} + +#[allow(clippy::format_in_format_args)] +impl fmt::LowerHex for Orbit { + // Prints the Keplerian orbital elements in floating point with units + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + if !self.frame.is_celestial() { + error!("you must update the frame from the Almanac before printing this state's orbital parameters"); + Err(fmt::Error) + } else { + let decimals = f.precision().unwrap_or(6); + write!( + f, + "[{:x}] {}\tsma = {} km\tecc = {}\tinc = {} deg\traan = {} deg\taop = {} deg\tta = {} deg", + self.frame, + self.epoch, + format!("{:.*}", decimals, self.sma_km().map_err(|err| { + error!("{err}"); + fmt::Error + })?), + format!("{:.*}", decimals, self.ecc().map_err(|err| { + error!("{err}"); + fmt::Error + })?), + format!("{:.*}", decimals, self.inc_deg().map_err(|err| { + error!("{err}"); + fmt::Error + })?), + format!("{:.*}", decimals, self.raan_deg().map_err(|err| { + error!("{err}"); + fmt::Error + })?), + format!("{:.*}", decimals, self.aop_deg().map_err(|err| { + error!("{err}"); + fmt::Error + })?), + format!("{:.*}", decimals, self.ta_deg().map_err(|err| { + error!("{err}"); + fmt::Error + })?), + ) + } } } diff --git a/src/astro/orbit_geodetic.rs b/src/astro/orbit_geodetic.rs index e300e808..eb4f84e6 100644 --- a/src/astro/orbit_geodetic.rs +++ b/src/astro/orbit_geodetic.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. 
If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -8,21 +8,19 @@ * Documentation: https://nyxspace.com/ */ +use super::PhysicsResult; use crate::{ - errors::PhysicsErrorKind, math::{ angles::{between_0_360, between_pm_180}, - cartesian::Cartesian, + cartesian::CartesianState, Vector3, }, - prelude::{GeodeticFrame, GeodeticFrameTrait}, + prelude::Frame, }; use hifitime::Epoch; use log::error; -pub type GeodeticOrbit = Cartesian; - -impl Cartesian { +impl CartesianState { /// Creates a new Orbit from the provided semi-major axis altitude in kilometers #[allow(clippy::too_many_arguments)] pub fn try_keplerian_altitude( @@ -32,17 +30,17 @@ impl Cartesian { raan: f64, aop: f64, ta: f64, - dt: Epoch, - frame: F, - ) -> Result { + epoch: Epoch, + frame: Frame, + ) -> PhysicsResult { Self::try_keplerian( - sma_altitude + frame.mean_equatorial_radius_km(), + sma_altitude + frame.mean_equatorial_radius_km()?, ecc, inc, raan, aop, ta, - dt, + epoch, frame, ) } @@ -56,17 +54,17 @@ impl Cartesian { raan: f64, aop: f64, ta: f64, - dt: Epoch, - frame: F, - ) -> Result { + epoch: Epoch, + frame: Frame, + ) -> PhysicsResult { Self::try_keplerian_apsis_radii( - a_a + frame.mean_equatorial_radius_km(), - a_p + frame.mean_equatorial_radius_km(), + a_a + frame.mean_equatorial_radius_km()?, + a_p + frame.mean_equatorial_radius_km()?, inc, raan, aop, ta, - dt, + epoch, frame, ) } @@ -81,46 +79,46 @@ impl Cartesian { longitude_deg: f64, height_km: f64, angular_velocity: f64, - dt: Epoch, - frame: F, - ) -> Self { - let e2 = 2.0 * frame.flattening() - frame.flattening().powi(2); + epoch: Epoch, + frame: Frame, + ) -> PhysicsResult { + let e2 = 2.0 * frame.flattening()? - frame.flattening()?.powi(2); let (sin_long, cos_long) = longitude_deg.to_radians().sin_cos(); let (sin_lat, cos_lat) = latitude_deg.to_radians().sin_cos(); // page 144 - let c_body = frame.semi_major_radius_km() / ((1.0 - e2 * sin_lat.powi(2)).sqrt()); - let s_body = (frame.semi_major_radius_km() * (1.0 - frame.flattening()).powi(2)) + let c_body = frame.semi_major_radius_km()? / ((1.0 - e2 * sin_lat.powi(2)).sqrt()); + let s_body = (frame.semi_major_radius_km()? * (1.0 - frame.flattening()?).powi(2)) / ((1.0 - e2 * sin_lat.powi(2)).sqrt()); let ri = (c_body + height_km) * cos_lat * cos_long; let rj = (c_body + height_km) * cos_lat * sin_long; let rk = (s_body + height_km) * sin_lat; let radius = Vector3::new(ri, rj, rk); let velocity = Vector3::new(0.0, 0.0, angular_velocity).cross(&radius); - Self::new( + Ok(Self::new( radius[0], radius[1], radius[2], velocity[0], velocity[1], velocity[2], - dt, + epoch, frame, - ) + )) } /// Returns the SMA altitude in km - pub fn sma_altitude(&self) -> f64 { - self.sma_km() - self.frame.mean_equatorial_radius_km() + pub fn sma_altitude(&self) -> PhysicsResult { + Ok(self.sma_km()? - self.frame.mean_equatorial_radius_km()?) } /// Returns the altitude of periapsis (or perigee around Earth), in kilometers. - pub fn periapsis_altitude(&self) -> f64 { - self.periapsis_km() - self.frame.mean_equatorial_radius_km() + pub fn periapsis_altitude(&self) -> PhysicsResult { + Ok(self.periapsis_km()? - self.frame.mean_equatorial_radius_km()?) } /// Returns the altitude of apoapsis (or apogee around Earth), in kilometers. - pub fn apoapsis_altitude(&self) -> f64 { - self.apoapsis_km() - self.frame.mean_equatorial_radius_km() + pub fn apoapsis_altitude(&self) -> PhysicsResult { + Ok(self.apoapsis_km()? - self.frame.mean_equatorial_radius_km()?) 
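Reviewer note: the altitude helpers above now return PhysicsResult because the mean equatorial radius is read from the frame at call time. A minimal sketch, assuming Orbit is re-exported from the prelude and the state's frame carries that constant:

use anise::astro::PhysicsResult;
use anise::prelude::Orbit;

fn apsis_altitudes_km(orbit: &Orbit) -> PhysicsResult<(f64, f64)> {
    // Altitudes above the mean equatorial radius of the frame's central body.
    Ok((orbit.periapsis_altitude()?, orbit.apoapsis_altitude()?))
}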
} /// Returns the geodetic longitude (λ) in degrees. Value is between 0 and 360 degrees. @@ -134,26 +132,26 @@ impl Cartesian { /// Returns the geodetic latitude (φ) in degrees. Value is between -180 and +180 degrees. /// /// Reference: Vallado, 4th Ed., Algorithm 12 page 172. - pub fn geodetic_latitude(&self) -> f64 { + pub fn geodetic_latitude(&self) -> PhysicsResult { let eps = 1e-12; let max_attempts = 20; let mut attempt_no = 0; let r_delta = (self.radius_km.x.powi(2) + self.radius_km.y.powi(2)).sqrt(); let mut latitude = (self.radius_km.z / self.rmag_km()).asin(); - let e2 = self.frame.flattening() * (2.0 - self.frame.flattening()); + let e2 = self.frame.flattening()? * (2.0 - self.frame.flattening()?); loop { attempt_no += 1; let c_earth = - self.frame.semi_major_radius_km() / ((1.0 - e2 * (latitude).sin().powi(2)).sqrt()); + self.frame.semi_major_radius_km()? / ((1.0 - e2 * (latitude).sin().powi(2)).sqrt()); let new_latitude = (self.radius_km.z + c_earth * e2 * (latitude).sin()).atan2(r_delta); if (latitude - new_latitude).abs() < eps { - return between_pm_180(new_latitude.to_degrees()); + return Ok(between_pm_180(new_latitude.to_degrees())); } else if attempt_no >= max_attempts { error!( "geodetic latitude failed to converge -- error = {}", (latitude - new_latitude).abs() ); - return between_pm_180(new_latitude.to_degrees()); + return Ok(between_pm_180(new_latitude.to_degrees())); } latitude = new_latitude; } @@ -162,20 +160,21 @@ impl Cartesian { /// Returns the geodetic height in km. /// /// Reference: Vallado, 4th Ed., Algorithm 12 page 172. - pub fn geodetic_height(&self) -> f64 { - let e2 = self.frame.flattening() * (2.0 - self.frame.flattening()); - let latitude = self.geodetic_latitude().to_radians(); + pub fn geodetic_height(&self) -> PhysicsResult { + let e2 = self.frame.flattening()? * (2.0 - self.frame.flattening()?); + let latitude = self.geodetic_latitude()?.to_radians(); let sin_lat = latitude.sin(); if (latitude - 1.0).abs() < 0.1 { // We are near poles, let's use another formulation. - let s_earth = (self.frame.semi_major_radius_km() - * (1.0 - self.frame.flattening()).powi(2)) + let s_earth = (self.frame.semi_major_radius_km()? + * (1.0 - self.frame.flattening()?).powi(2)) / ((1.0 - e2 * sin_lat.powi(2)).sqrt()); - self.radius_km.z / latitude.sin() - s_earth + Ok(self.radius_km.z / latitude.sin() - s_earth) } else { - let c_earth = self.frame.semi_major_radius_km() / ((1.0 - e2 * sin_lat.powi(2)).sqrt()); + let c_earth = + self.frame.semi_major_radius_km()? 
/ ((1.0 - e2 * sin_lat.powi(2)).sqrt()); let r_delta = (self.radius_km.x.powi(2) + self.radius_km.y.powi(2)).sqrt(); - r_delta / latitude.cos() - c_earth + Ok(r_delta / latitude.cos() - c_earth) } } } diff --git a/src/bin/anise/main.rs b/src/bin/anise/main.rs index d6c388f9..910d3b4f 100644 --- a/src/bin/anise/main.rs +++ b/src/bin/anise/main.rs @@ -1,9 +1,11 @@ extern crate pretty_env_logger; use std::env::{set_var, var}; +use snafu::prelude::*; + use anise::cli::args::{Actions, Args}; use anise::cli::inspect::{BpcRow, SpkRow}; -use anise::cli::CliErrors; +use anise::cli::{AniseSnafu, CliDAFSnafu, CliDataSetSnafu, CliErrors, CliFileRecordSnafu}; use anise::file2heap; use anise::naif::daf::{FileRecord, NAIFRecord, NAIFSummaryRecord}; use anise::naif::kpl::parser::convert_tpc; @@ -13,7 +15,7 @@ use anise::structure::metadata::Metadata; use anise::structure::planetocentric::PlanetaryData; use anise::structure::spacecraft::SpacecraftData; use clap::Parser; -use log::{error, info}; +use log::info; use tabled::{settings::Style, Table}; use zerocopy::FromBytes; @@ -35,188 +37,140 @@ fn main() -> Result<(), CliErrors> { crc32_checksum, } => { let path_str = file.clone(); - match file2heap!(file) { - Ok(bytes) => { - // Try to load this as a dataset by first trying to load the metadata - if let Ok(metadata) = Metadata::decode_header(&bytes) { - // Now, we can load this depending on the kind of data that it is - match metadata.dataset_type { - DataSetType::NotApplicable => unreachable!("no such ANISE data yet"), - DataSetType::SpacecraftData => { - // Decode as spacecraft data - let dataset = - DataSet::::try_from_bytes(&bytes)?; - println!("{dataset}"); - Ok(()) - } - DataSetType::PlanetaryData => { - // Decode as planetary data - let dataset = DataSet::::try_from_bytes(&bytes)?; - println!("{dataset}"); - Ok(()) - } - } - } else { - // Load the header only - let file_record = - FileRecord::read_from(&bytes[..FileRecord::SIZE]).unwrap(); - match file_record - .identification() - .map_err(CliErrors::AniseError)? 
- { - "PCK" => { - info!("Loading {path_str:?} as DAF/PCK"); - match BPC::check_then_parse(bytes, crc32_checksum) { - Ok(_) => { - info!("[OK] Checksum matches"); - Ok(()) - } - Err(AniseError::IntegrityError(e)) => { - error!("CRC32 checksums differ for {path_str:?}: {e:?}"); - Err(CliErrors::AniseError(AniseError::IntegrityError(e))) - } - Err(e) => { - error!("Some other error happened when loading {path_str:?}: {e:?}"); - Err(CliErrors::AniseError(e)) - } - } - } - "SPK" => { - info!("Loading {path_str:?} as DAF/SPK"); - match SPK::check_then_parse(bytes, crc32_checksum) { - Ok(_) => { - info!("[OK] Checksum matches"); - Ok(()) - } - Err(AniseError::IntegrityError(e)) => { - error!("CRC32 checksums differ for {path_str:?}: {e:?}"); - Err(CliErrors::AniseError(AniseError::IntegrityError(e))) - } - Err(e) => { - error!("Some other error happened when loading {path_str:?}: {e:?}"); - Err(CliErrors::AniseError(e)) - } - } - } - _ => unreachable!(), - } + let bytes = file2heap!(file).with_context(|_| AniseSnafu)?; + // Try to load this as a dataset by first trying to load the metadata + if let Ok(metadata) = Metadata::decode_header(&bytes) { + // Now, we can load this depending on the kind of data that it is + match metadata.dataset_type { + DataSetType::NotApplicable => unreachable!("no such ANISE data yet"), + DataSetType::SpacecraftData => { + // Decode as spacecraft data + let dataset = DataSet::::try_from_bytes(&bytes) + .with_context(|_| CliDataSetSnafu)?; + println!("{dataset}"); + Ok(()) + } + DataSetType::PlanetaryData => { + // Decode as planetary data + let dataset = DataSet::::try_from_bytes(&bytes) + .with_context(|_| CliDataSetSnafu)?; + println!("{dataset}"); + Ok(()) + } + } + } else { + // Load the header only + let file_record = FileRecord::read_from(&bytes[..FileRecord::SIZE]).unwrap(); + match file_record + .identification() + .with_context(|_| CliFileRecordSnafu)? + { + "PCK" => { + info!("Loading {path_str:?} as DAF/PCK"); + BPC::check_then_parse(bytes, crc32_checksum) + .with_context(|_| CliDAFSnafu)?; + info!("[OK] Checksum matches"); + Ok(()) + } + "SPK" => { + info!("Loading {path_str:?} as DAF/SPK"); + SPK::check_then_parse(bytes, crc32_checksum) + .with_context(|_| CliDAFSnafu)?; + info!("[OK] Checksum matches"); + Ok(()) } + _ => unreachable!(), } - Err(e) => Err(e.into()), } } Actions::Inspect { file } => { let path_str = file.clone(); - match file2heap!(file) { - Ok(bytes) => { - // Load the header only - let file_record = FileRecord::read_from(&bytes[..FileRecord::SIZE]).unwrap(); - - match file_record - .identification() - .map_err(CliErrors::AniseError)? - { - "PCK" => { - info!("Loading {path_str:?} as DAF/PCK"); - match BPC::parse(bytes) { - Ok(pck) => { - info!("CRC32 checksum: 0x{:X}", pck.crc32()); - if let Some(comments) = pck.comments()? 
{ - println!("== COMMENTS ==\n{}== END ==", comments); - } else { - println!("(File has no comments)"); - } - // Build the rows of the table - let mut rows = Vec::new(); - - for (sno, summary) in - pck.data_summaries().unwrap().iter().enumerate() - { - let name = pck - .name_record - .nth_name(sno, pck.file_record.summary_size()); - if summary.is_empty() { - continue; - } - rows.push(BpcRow { - name, - start_epoch: format!("{:E}", summary.start_epoch()), - end_epoch: format!("{:E}", summary.end_epoch()), - duration: summary.end_epoch() - summary.start_epoch(), - interpolation_kind: format!("{}", summary.data_type_i), - frame: format!("{}", summary.frame_id), - inertial_frame: format!( - "{}", - summary.inertial_frame_id - ), - }); - } - - let mut tbl = Table::new(rows); - tbl.with(Style::modern()); - println!("{tbl}"); - - Ok(()) - } - Err(e) => { - error!("Some other error happened when loading {path_str:?}: {e:?}"); - Err(CliErrors::AniseError(e)) - } - } + let bytes = file2heap!(file).with_context(|_| AniseSnafu)?; + // Load the header only + let file_record = FileRecord::read_from(&bytes[..FileRecord::SIZE]).unwrap(); + + match file_record + .identification() + .with_context(|_| CliFileRecordSnafu)? + { + "PCK" => { + info!("Loading {path_str:?} as DAF/PCK"); + let pck = BPC::parse(bytes).with_context(|_| CliDAFSnafu)?; + info!("CRC32 checksum: 0x{:X}", pck.crc32()); + if let Some(comments) = pck.comments().with_context(|_| CliDAFSnafu)? { + println!("== COMMENTS ==\n{}== END ==", comments); + } else { + println!("(File has no comments)"); + } + // Build the rows of the table + let mut rows = Vec::new(); + + for (sno, summary) in pck.data_summaries().unwrap().iter().enumerate() { + let name_rcrd = pck.name_record().unwrap(); + let name = + name_rcrd.nth_name(sno, pck.file_record().unwrap().summary_size()); + if summary.is_empty() { + continue; } - "SPK" => { - info!("Loading {path_str:?} as DAF/SPK"); - match SPK::parse(bytes) { - Ok(spk) => { - info!("CRC32 checksum: 0x{:X}", spk.crc32()); - if let Some(comments) = spk.comments()? 
{ - println!("== COMMENTS ==\n{}== END ==", comments); - } else { - println!("(File has no comments)"); - } - // Build the rows of the table - let mut rows = Vec::new(); - - for (sno, summary) in - spk.data_summaries().unwrap().iter().enumerate() - { - let name = spk - .name_record - .nth_name(sno, spk.file_record.summary_size()); - if summary.is_empty() { - continue; - } - - rows.push(SpkRow { - name, - center: summary.center_id, - start_epoch: format!("{:E}", summary.start_epoch()), - end_epoch: format!("{:E}", summary.end_epoch()), - duration: summary.end_epoch() - summary.start_epoch(), - interpolation_kind: format!("{}", summary.data_type_i), - frame: format!("{}", summary.frame_id), - target: format!("{}", summary.target_id), - }); - } - - let mut tbl = Table::new(rows); - tbl.with(Style::modern()); - println!("{tbl}"); - - Ok(()) - } - Err(e) => { - error!("Some other error happened when loading {path_str:?}: {e:?}"); - Err(CliErrors::AniseError(e)) - } - } + rows.push(BpcRow { + name: name.to_string(), + start_epoch: format!("{:E}", summary.start_epoch()), + end_epoch: format!("{:E}", summary.end_epoch()), + duration: summary.end_epoch() - summary.start_epoch(), + interpolation_kind: format!("{}", summary.data_type_i), + frame: format!("{}", summary.frame_id), + inertial_frame: format!("{}", summary.inertial_frame_id), + }); + } + + let mut tbl = Table::new(rows); + tbl.with(Style::modern()); + println!("{tbl}"); + + Ok(()) + } + "SPK" => { + info!("Loading {path_str:?} as DAF/SPK"); + let spk = SPK::parse(bytes).with_context(|_| CliDAFSnafu)?; + + info!("CRC32 checksum: 0x{:X}", spk.crc32()); + if let Some(comments) = spk.comments().with_context(|_| CliDAFSnafu)? { + println!("== COMMENTS ==\n{}== END ==", comments); + } else { + println!("(File has no comments)"); + } + // Build the rows of the table + let mut rows = Vec::new(); + + for (sno, summary) in spk.data_summaries().unwrap().iter().enumerate() { + let name_rcrd = spk.name_record().unwrap(); + let name = + name_rcrd.nth_name(sno, spk.file_record().unwrap().summary_size()); + if summary.is_empty() { + continue; } - fileid => Err(CliErrors::ArgumentError(format!( - "{fileid} is not supported yet" - ))), + + rows.push(SpkRow { + name: name.to_string(), + center: summary.center_id, + start_epoch: format!("{:E}", summary.start_epoch()), + end_epoch: format!("{:E}", summary.end_epoch()), + duration: summary.end_epoch() - summary.start_epoch(), + interpolation_kind: format!("{}", summary.data_type_i), + frame: format!("{}", summary.frame_id), + target: format!("{}", summary.target_id), + }); } + + let mut tbl = Table::new(rows); + tbl.with(Style::modern()); + println!("{tbl}"); + + Ok(()) } - Err(e) => Err(e.into()), + fileid => Err(CliErrors::ArgumentError { + arg: format!("{fileid} is not supported yet"), + }), } } Actions::ConvertTpc { @@ -224,9 +178,11 @@ fn main() -> Result<(), CliErrors> { gmfile, outfile, } => { - let dataset = convert_tpc(pckfile, gmfile).map_err(CliErrors::AniseError)?; + let dataset = convert_tpc(pckfile, gmfile).with_context(|_| CliDataSetSnafu)?; - dataset.save_as(outfile, false)?; + dataset + .save_as(outfile, false) + .with_context(|_| CliDataSetSnafu)?; Ok(()) } diff --git a/src/cli/inspect.rs b/src/cli/inspect.rs index c6dcaadc..89854f7f 100644 --- a/src/cli/inspect.rs +++ b/src/cli/inspect.rs @@ -2,8 +2,8 @@ use hifitime::Duration; use tabled::Tabled; #[derive(Tabled)] -pub struct BpcRow<'a> { - pub name: &'a str, +pub struct BpcRow { + pub name: String, pub start_epoch: String, pub end_epoch: 
String, pub duration: Duration, @@ -13,8 +13,8 @@ pub struct BpcRow<'a> { } #[derive(Tabled)] -pub struct SpkRow<'a> { - pub name: &'a str, +pub struct SpkRow { + pub name: String, pub target: String, pub frame: String, pub center: i32, diff --git a/src/cli/mod.rs b/src/cli/mod.rs index fd336867..b49a2764 100644 --- a/src/cli/mod.rs +++ b/src/cli/mod.rs @@ -1,23 +1,40 @@ extern crate clap; extern crate tabled; -extern crate thiserror; +use snafu::prelude::*; use std::io; -use thiserror::Error; - -use crate::prelude::AniseError; +use crate::{ + naif::daf::{file_record::FileRecordError, DAFError}, + prelude::InputOutputError, + structure::dataset::DataSetError, +}; pub mod args; pub mod inspect; -#[derive(Debug, Error)] +#[derive(Debug, Snafu)] +#[snafu(visibility(pub))] pub enum CliErrors { - #[error("File not found or unreadable")] - FileNotFound(#[from] io::Error), - #[error("ANISE error encountered")] - AniseError(#[from] AniseError), - #[error("{0}")] - ArgumentError(String), + /// File not found or unreadable + FileNotFound { + source: io::Error, + }, + /// ANISE error encountered" + CliDAF { + source: DAFError, + }, + CliFileRecord { + source: FileRecordError, + }, + ArgumentError { + arg: String, + }, + CliDataSet { + source: DataSetError, + }, + AniseError { + source: InputOutputError, + }, } diff --git a/src/constants.rs b/src/constants.rs index 910dc8d9..767af4be 100644 --- a/src/constants.rs +++ b/src/constants.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -11,8 +11,6 @@ pub mod celestial_objects { use crate::NaifId; - // TODO: Merge with id_to_human_name - pub const SOLAR_SYSTEM_BARYCENTER: NaifId = 0; pub const MERCURY: NaifId = 1; pub const VENUS: NaifId = 2; @@ -27,7 +25,7 @@ pub mod celestial_objects { pub const LUNA: NaifId = 301; pub const EARTH: NaifId = 399; - pub const fn celestial_name_from_id<'a>(id: NaifId) -> Option<&'a str> { + pub const fn celestial_name_from_id(id: NaifId) -> Option<&'static str> { match id { SOLAR_SYSTEM_BARYCENTER => Some("Solar System Barycenter"), MERCURY => Some("Mercury"), @@ -47,13 +45,149 @@ pub mod celestial_objects { } } +/// Defines the orientations known to ANISE and SPICE. +/// References used in the constants. +/// \[1\] Jay Lieske, ``Precession Matrix Based on IAU (1976) +/// System of Astronomical Constants,'' Astron. Astrophys. +/// 73, 282-284 (1979). +/// +/// \[2\] E.M. Standish, Jr., ``Orientation of the JPL Ephemerides, +/// DE 200/LE 200, to the Dynamical Equinox of J2000,'' +/// Astron. Astrophys. 114, 297-302 (1982). +/// +/// \[3\] E.M. Standish, Jr., ``Conversion of Ephemeris Coordinates +/// from the B1950 System to the J2000 System,'' JPL IOM +/// 314.6-581, 24 June 1985. +/// +/// \[4\] E.M. Standish, Jr., ``The Equinox Offsets of the JPL +/// Ephemeris,'' JPL IOM 314.6-929, 26 February 1988. +/// +/// \[5\] Jay Lieske, ``Expressions for the Precession Quantities +/// Based upon the IAU (1976) System of Astronomical +/// Constants'' Astron. Astrophys. 58, 1-16 (1977). +/// +/// \[6\] Laura Bass and Robert Cesarone "Mars Observer Planetary +/// Constants and Models" JPL D-3444 November 1990. +/// +/// \[7\] "Explanatory Supplement to the Astronomical Almanac" +/// edited by P. 
Kenneth Seidelmann. University Science +/// Books, 20 Edgehill Road, Mill Valley, CA 94941 (1992) pub mod orientations { use crate::NaifId; - pub const J2000: NaifId = 0; + /// Earth mean equator, dynamical equinox of J2000. The root reference frame for SPICE. + pub const J2000: NaifId = 1; + /// Earth mean equator, dynamical equinox of B1950. + /// The B1950 reference frame is obtained by precessing the J2000 frame backwards from Julian year 2000 to Besselian year 1950, using the 1976 IAU precession model. + /// The rotation from B1950 to J2000 is + /// \[ -z \] \[ theta \] \[ -zeta \] + /// 3 2 3 + /// The values for z, theta, and zeta are computed from the formulas given in table 5 of [5]. + /// z = 1153.04066200330" + /// theta = 1002.26108439117" + /// zeta = 1152.84248596724" + pub const B1950: NaifId = 2; + /// Fundamental Catalog (4). The FK4 reference frame is derived from the B1950 frame by applying the equinox offset determined by Fricke. + /// \[ 0.525" \] + /// 3 + pub const FK4: NaifId = 3; + + /// JPL Developmental Ephemeris (118). The DE-118 reference frame is nearly identical to the FK4 frame. It is also derived from the B1950 frame. + /// Only the offset is different + /// + /// \[ 0.53155" \] + /// 3 + /// + /// In [2], Standish uses two separate rotations, + /// + /// \[ 0.00073" \] P \[ 0.5316" \] + /// 3 3 + /// + /// (where P is the precession matrix used above to define the B1950 frame). The major effect of the second rotation is to correct for truncating the magnitude of the first rotation. + /// At his suggestion, we will use the untruncated value, and stick to a single rotation. + /// + /// + /// Most of the other DE historical reference frames are defined relative to either the DE-118 or B1950 frame. + /// The values below are taken from \[4\]. + ///```text + /// DE number Offset from DE-118 Offset from B1950 + /// --------- ------------------ ----------------- + /// 96 +0.1209" +0.4107" + /// 102 +0.3956" +0.1359" + /// 108 +0.0541" +0.4775" + /// 111 -0.0564" +0.5880" + /// 114 -0.0213" +0.5529" + /// 122 +0.0000" +0.5316" + /// 125 -0.0438" +0.5754" + /// 130 +0.0069" +0.5247" + ///``` + pub const DE118: NaifId = 4; + pub const DE096: NaifId = 5; + pub const DE102: NaifId = 6; + pub const DE108: NaifId = 7; + pub const DE111: NaifId = 8; + pub const DE114: NaifId = 9; + pub const DE122: NaifId = 10; + pub const DE125: NaifId = 11; + pub const DE130: NaifId = 12; + /// Galactic System II. The Galactic System II reference frame is defined by the following rotations: + /// o o o + /// \[ 327 \] \[ 62.6 \] \[ 282.25 \] + /// 3 1 3 + /// In the absence of better information, we assume the rotations are relative to the FK4 frame. + pub const GALACTIC: NaifId = 13; + pub const DE200: NaifId = 14; + pub const DE202: NaifId = 15; + /// Mars Mean Equator and IAU vector of J2000. The IAU-vector at Mars is the point on the mean equator of Mars where the equator ascends through the earth mean equator. + /// This vector is the cross product of Earth mean north with Mars mean north. + pub const MARSIAU: NaifId = 16; + /// Ecliptic coordinates based upon the J2000 frame. + /// The value for the obliquity of the ecliptic at J2000 is taken from page 114 of \[7\] equation 3.222-1. + /// This agrees with the expression given in \[5\]. + pub const ECLIPJ2000: NaifId = 17; + /// Ecliptic coordinates based upon the B1950 frame. + /// The value for the obliquity of the ecliptic at B1950 is taken from page 171 of \[7\]. 
+ pub const ECLIPB1950: NaifId = 18; + /// JPL Developmental Ephemeris. (140) + /// The DE-140 frame is the DE-400 frame rotated: + /// + /// 0.9999256765384668 0.0111817701197967 0.0048589521583895 + /// -0.0111817701797229 0.9999374816848701 -0.0000271545195858 + /// -0.0048589520204830 -0.0000271791849815 0.9999881948535965 + /// + /// The DE-400 frame is treated as equivalent to the J2000 frame. + pub const DE140: NaifId = 19; + + /// JPL Developmental Ephemeris. (142) + /// The DE-142 frame is the DE-402 frame rotated: + /// + /// 0.9999256765402605 0.0111817697320531 0.0048589526815484 + /// -0.0111817697907755 0.9999374816892126 -0.0000271547693170 + /// -0.0048589525464121 -0.0000271789392288 0.9999881948510477 + /// + /// The DE-402 frame is treated as equivalent to the J2000 frame. + pub const DE142: NaifId = 20; - pub const fn orientation_name_from_id<'a>(hash: NaifId) -> Option<&'a str> { + /// JPL Developmental Ephemeris. (143) + /// The DE-143 frame is the DE-403 frame rotated: + /// + /// 0.9999256765435852 0.0111817743077255 0.0048589414674762 + /// -0.0111817743300355 0.9999374816382505 -0.0000271622115251 + /// -0.0048589414161348 -0.0000271713942366 0.9999881949053349 + /// + /// The DE-403 frame is treated as equivalent to the J2000 frame. + pub const DE143: NaifId = 21; + + /// Given the frame ID, try to return a human name + /// Source: // https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/req/frames.html#Appendix.%20%60%60Built%20in''%20Inertial%20Reference%20Frames + pub const fn orientation_name_from_id(hash: NaifId) -> Option<&'static str> { match hash { J2000 => Some("J2000"), + B1950 => Some("B1950"), + FK4 => Some("FK4"), + GALACTIC => Some("Galactic"), + MARSIAU => Some("Mars IAU"), + ECLIPJ2000 => Some("ECLIPJ2000"), + ECLIPB1950 => Some("ECLIPB1950"), _ => None, } } @@ -78,4 +212,67 @@ pub mod frames { pub const SUN_J2000: Frame = Frame::from_ephem_orient(SUN, J2000); pub const LUNA_J2000: Frame = Frame::from_ephem_orient(LUNA, J2000); pub const EARTH_J2000: Frame = Frame::from_ephem_orient(EARTH, J2000); + pub const EME2000: Frame = Frame::from_ephem_orient(EARTH, J2000); +} + +#[cfg(test)] +mod constants_ut { + use crate::constants::orientations::{ + orientation_name_from_id, B1950, ECLIPB1950, ECLIPJ2000, FK4, J2000, MARSIAU, + }; + + use crate::constants::celestial_objects::*; + + #[test] + fn orient_name_from_id() { + assert_eq!(orientation_name_from_id(J2000).unwrap(), "J2000"); + assert_eq!(orientation_name_from_id(B1950).unwrap(), "B1950"); + assert_eq!(orientation_name_from_id(ECLIPB1950).unwrap(), "ECLIPB1950"); + assert_eq!(orientation_name_from_id(ECLIPJ2000).unwrap(), "ECLIPJ2000"); + assert_eq!(orientation_name_from_id(FK4).unwrap(), "FK4"); + assert_eq!(orientation_name_from_id(MARSIAU).unwrap(), "Mars IAU"); + assert!(orientation_name_from_id(-1).is_none()); + } + + #[test] + fn object_name_from_id() { + assert_eq!( + celestial_name_from_id(SOLAR_SYSTEM_BARYCENTER).unwrap(), + "Solar System Barycenter" + ); + assert_eq!(celestial_name_from_id(MERCURY).unwrap(), "Mercury"); + assert_eq!(celestial_name_from_id(VENUS).unwrap(), "Venus"); + assert_eq!( + celestial_name_from_id(EARTH_MOON_BARYCENTER).unwrap(), + "Earth-Moon Barycenter" + ); + assert_eq!( + celestial_name_from_id(MARS_BARYCENTER).unwrap(), + "Mars Barycenter" + ); + assert_eq!( + celestial_name_from_id(JUPITER_BARYCENTER).unwrap(), + "Jupiter Barycenter" + ); + assert_eq!( + celestial_name_from_id(SATURN_BARYCENTER).unwrap(), + "Saturn Barycenter" + ); + assert_eq!( + 
celestial_name_from_id(URANUS_BARYCENTER).unwrap(), + "Uranus Barycenter" + ); + assert_eq!( + celestial_name_from_id(NEPTUNE_BARYCENTER).unwrap(), + "Neptune Barycenter" + ); + assert_eq!( + celestial_name_from_id(PLUTO_BARYCENTER).unwrap(), + "Pluto Barycenter" + ); + assert_eq!(celestial_name_from_id(SUN).unwrap(), "Sun"); + assert_eq!(celestial_name_from_id(LUNA).unwrap(), "Luna"); + assert_eq!(celestial_name_from_id(EARTH).unwrap(), "Earth"); + assert!(celestial_name_from_id(-1).is_none()); + } } diff --git a/src/ephemerides/mod.rs b/src/ephemerides/mod.rs index 50652b93..55a870b6 100644 --- a/src/ephemerides/mod.rs +++ b/src/ephemerides/mod.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -8,6 +8,55 @@ * Documentation: https://nyxspace.com/ */ +use hifitime::Epoch; +use snafu::prelude::*; + +use crate::{ + errors::PhysicsError, math::interpolation::InterpolationError, naif::daf::DAFError, + prelude::FrameUid, NaifId, +}; + pub mod paths; pub mod translate_to_parent; pub mod translations; + +#[derive(Debug, Snafu, PartialEq)] +#[snafu(visibility(pub(crate)))] +pub enum EphemerisError { + /// Somehow you've entered code that should not be reachable, please file a bug. + Unreachable, + #[snafu(display( + "could not load SPK because all {max_slots} are used (modify `MAX_LOADED_SPKS` at build time)" + ))] + StructureIsFull { max_slots: usize }, + #[snafu(display( + "Could not translate from {from} to {to}: no common origin found at epoch {epoch}" + ))] + TranslationOrigin { + from: FrameUid, + to: FrameUid, + epoch: Epoch, + }, + #[snafu(display("no ephemeris data loaded (must call load_spk)"))] + NoEphemerisLoaded, + #[snafu(display("when {action} caused {source}"))] + SPK { + action: &'static str, + #[snafu(backtrace)] + source: DAFError, + }, + #[snafu(display("during an ephemeris operation: {source}"))] + UnderlyingPhysics { + #[snafu(backtrace)] + source: PhysicsError, + }, + #[snafu(display("during an ephemeris interpolation {source}"))] + EphemInterpolation { + #[snafu(backtrace)] + source: InterpolationError, + }, + #[snafu(display("unknown name associated with NAIF ID {id}"))] + IdToName { id: NaifId }, + #[snafu(display("unknown NAIF ID associated with `{name}`"))] + NameToId { name: String }, +} diff --git a/src/ephemerides/paths.rs b/src/ephemerides/paths.rs index 91cdd77e..12d7b13b 100644 --- a/src/ephemerides/paths.rs +++ b/src/ephemerides/paths.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
@@ -9,16 +9,13 @@ */ use hifitime::Epoch; -use log::error; +use snafu::{ensure, ResultExt}; +use super::{EphemerisError, NoEphemerisLoadedSnafu, SPKSnafu}; use crate::almanac::Almanac; -use crate::errors::InternalErrorKind; -use crate::naif::daf::NAIFSummaryRecord; +use crate::frames::Frame; +use crate::naif::daf::{DAFError, NAIFSummaryRecord}; use crate::NaifId; -use crate::{ - errors::{AniseError, IntegrityErrorKind}, - frames::Frame, -}; /// **Limitation:** no translation or rotation may have more than 8 nodes. pub const MAX_TREE_DEPTH: usize = 8; @@ -30,18 +27,18 @@ impl<'a> Almanac<'a> { /// /// 1. For each loaded SPK, iterated in reverse order (to mimic SPICE behavior) /// 2. For each summary record in each SPK, follow the ephemeris branch all the way up until the end of this SPK or until the SSB. - pub fn try_find_context_center(&self) -> Result { - if self.num_loaded_spk() == 0 { - // TODO: Change to another error - return Err(AniseError::NoInterpolationData); - } + pub fn try_find_context_center(&self) -> Result { + ensure!(self.num_loaded_spk() > 0, NoEphemerisLoadedSnafu); + // The common center is the absolute minimum of all centers due to the NAIF numbering. let mut common_center = i32::MAX; for maybe_spk in self.spk_data.iter().take(self.num_loaded_spk()).rev() { - let spk = maybe_spk.unwrap(); + let spk = maybe_spk.as_ref().unwrap(); - for summary in spk.data_summaries()? { + for summary in spk.data_summaries().with_context(|_| SPKSnafu { + action: "finding ephemeris root", + })? { // This summary exists, so we need to follow the branch of centers up the tree. if !summary.is_empty() && summary.center_id.abs() < common_center.abs() { common_center = summary.center_id; @@ -58,9 +55,9 @@ impl<'a> Almanac<'a> { /// Try to construct the path from the source frame all the way to the root ephemeris of this context. pub fn ephemeris_path_to_root( &self, - source: &Frame, + source: Frame, epoch: Epoch, - ) -> Result<(usize, [Option; MAX_TREE_DEPTH]), AniseError> { + ) -> Result<(usize, [Option; MAX_TREE_DEPTH]), EphemerisError> { let common_center = self.try_find_context_center()?; // Build a tree, set a fixed depth to avoid allocations let mut of_path = [None; MAX_TREE_DEPTH]; @@ -85,24 +82,20 @@ impl<'a> Almanac<'a> { } for _ in 0..MAX_TREE_DEPTH { - match self.spk_summary_at_epoch(center_id, epoch) { - Ok((summary, _, _)) => { - center_id = summary.center_id; - of_path[of_path_len] = Some(center_id); - of_path_len += 1; - if center_id == common_center { - // We're found the path! - return Ok((of_path_len, of_path)); - } - } - Err(e) => { - error!("I don't think this should happen: {e}"); - return Err(AniseError::InternalError(InternalErrorKind::Generic)); - } + let summary = self.spk_summary_at_epoch(center_id, epoch)?.0; + center_id = summary.center_id; + of_path[of_path_len] = Some(center_id); + of_path_len += 1; + if center_id == common_center { + // We're found the path! + return Ok((of_path_len, of_path)); } } - Err(AniseError::MaxTreeDepth) + Err(EphemerisError::SPK { + action: "computing path to common node", + source: DAFError::MaxRecursionDepth, + }) } /// Returns the ephemeris path between two frames and the common node. This may return a `DisjointRoots` error if the frames do not share a common root, which is considered a file integrity error. 
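As a side note for reviewers unfamiliar with snafu: the hunks above (and most of the ones below) replace manual `match`/`map_err` chains with `ensure!` preconditions and context selectors (`.context(...)` / `.with_context(...)`). The sketch below illustrates that pattern with toy error types; `LowLevelError`, `HighLevelError`, `read_record` and `query` are illustrative stand-ins, not ANISE's actual enums or functions.

```rust
// Toy sketch of the snafu error-wrapping pattern adopted in this PR (illustrative types only).
use snafu::prelude::*;

#[derive(Debug, Snafu)]
enum LowLevelError {
    #[snafu(display("record {index} is empty"))]
    EmptyRecord { index: usize },
}

#[derive(Debug, Snafu)]
enum HighLevelError {
    #[snafu(display("no ephemeris data loaded"))]
    NothingLoaded,
    #[snafu(display("when {action}: {source}"))]
    Wrapped {
        action: &'static str,
        source: LowLevelError,
    },
}

fn read_record(index: usize) -> Result<f64, LowLevelError> {
    // `ensure!` replaces the manual `if ... { return Err(...) }` blocks removed above.
    ensure!(index < 10, EmptyRecordSnafu { index });
    Ok(index as f64)
}

fn query(num_loaded: usize) -> Result<f64, HighLevelError> {
    ensure!(num_loaded > 0, NothingLoadedSnafu);
    // `.context(...)` wraps the low-level error into the higher-level variant,
    // mirroring `.with_context(|_| SPKSnafu { action: ... })` in the real code.
    read_record(num_loaded - 1).context(WrappedSnafu {
        action: "finding ephemeris root",
    })
}

fn main() {
    // With nothing loaded, the precondition trips immediately.
    assert!(query(0).is_err());
    // With one record loaded, the low-level read succeeds.
    assert!(query(1).is_ok());
}
```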
@@ -139,28 +132,26 @@ impl<'a> Almanac<'a> { from_frame: Frame, to_frame: Frame, epoch: Epoch, - ) -> Result<(usize, [Option; MAX_TREE_DEPTH], NaifId), AniseError> { - // TODO: Consider returning a structure that has explicit fields -- see how I use it first + ) -> Result<(usize, [Option; MAX_TREE_DEPTH], NaifId), EphemerisError> { if from_frame == to_frame { // Both frames match, return this frame's hash (i.e. no need to go higher up). return Ok((0, [None; MAX_TREE_DEPTH], from_frame.ephemeris_id)); } // Grab the paths - let (from_len, from_path) = self.ephemeris_path_to_root(&from_frame, epoch)?; - let (to_len, to_path) = self.ephemeris_path_to_root(&to_frame, epoch)?; + let (from_len, from_path) = self.ephemeris_path_to_root(from_frame, epoch)?; + let (to_len, to_path) = self.ephemeris_path_to_root(to_frame, epoch)?; // Now that we have the paths, we can find the matching origin. // If either path is of zero length, that means one of them is at the root of this ANISE file, so the common // path is which brings the non zero-length path back to the file root. if from_len == 0 && to_len == 0 { - Err(AniseError::IntegrityError( - IntegrityErrorKind::DisjointRoots { - from_frame, - to_frame, - }, - )) + Err(EphemerisError::TranslationOrigin { + from: from_frame.into(), + to: to_frame.into(), + epoch, + }) } else if from_len != 0 && to_len == 0 { // One has an empty path but not the other, so the root is at the empty path Ok((from_len, from_path, to_frame.ephemeris_id)) @@ -201,7 +192,7 @@ impl<'a> Almanac<'a> { } // This is weird and I don't think it should happen, so let's raise an error. - Err(AniseError::IntegrityError(IntegrityErrorKind::DataMissing)) + Err(EphemerisError::Unreachable) } } } diff --git a/src/ephemerides/translate_to_parent.rs b/src/ephemerides/translate_to_parent.rs index eeb1be94..b981fef0 100644 --- a/src/ephemerides/translate_to_parent.rs +++ b/src/ephemerides/translate_to_parent.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -9,16 +9,18 @@ */ use log::trace; +use snafu::ResultExt; +use super::{EphemerisError, SPKSnafu}; use crate::almanac::Almanac; use crate::astro::Aberration; -use crate::errors::IntegrityErrorKind; +use crate::ephemerides::EphemInterpolationSnafu; use crate::hifitime::Epoch; -use crate::math::units::*; +use crate::math::cartesian::CartesianState; use crate::math::Vector3; use crate::naif::daf::NAIFDataSet; use crate::naif::spk::datatypes::{HermiteSetType13, LagrangeSetType9, Type2ChebyshevSet}; -use crate::{errors::AniseError, prelude::Frame}; +use crate::prelude::Frame; impl<'a> Almanac<'a> { /// Returns the position vector and velocity vector of the `source` with respect to its parent in the ephemeris at the provided epoch, @@ -32,17 +34,12 @@ impl<'a> Almanac<'a> { /// + As of now, some interpolation types are not supported, and if that were to happen, this would return an error. /// /// **WARNING:** This function only performs the translation and no rotation whatsoever. Use the `transform_to_parent_from` function instead to include rotations. 
- pub fn translate_to_parent( + pub(crate) fn translation_parts_to_parent( &self, source: Frame, epoch: Epoch, _ab_corr: Aberration, - distance_unit: LengthUnit, - time_unit: TimeUnit, - ) -> Result<(Vector3, Vector3, Vector3, Frame), AniseError> { - // TODO: Create a CartesianState struct which can be "upgraded" to an Orbit if the frame is of the correct type? - // I guess this is what the `Orbit` struct in Nyx does. - + ) -> Result<(Vector3, Vector3, Frame), EphemerisError> { // First, let's find the SPK summary for this frame. let (summary, spk_no, idx_in_spk) = self.spk_summary_at_epoch(source.ephemeris_id, epoch)?; @@ -51,42 +48,64 @@ impl<'a> Almanac<'a> { trace!("query {source} wrt to {new_frame} @ {epoch:E}"); + // This should not fail because we've fetched the spk_no from above with the spk_summary_at_epoch call. let spk_data = self.spk_data[spk_no] - .ok_or(AniseError::IntegrityError(IntegrityErrorKind::DataMissing))?; - - // Perform a translation with position and velocity; - let acc = Vector3::zeros(); + .as_ref() + .ok_or(EphemerisError::Unreachable)?; // Now let's simply evaluate the data let (pos_km, vel_km_s) = match summary.data_type_i { // TODO : match against enumeration instead of direct integer match for clarity ? 2 => { // Type 2 Chebyshev - let data = spk_data.nth_data::(idx_in_spk)?; - data.evaluate(epoch, summary)? + let data = spk_data + .nth_data::(idx_in_spk) + .with_context(|_| SPKSnafu { + action: "fetching data for interpolation", + })?; + data.evaluate(epoch, summary) + .with_context(|_| EphemInterpolationSnafu)? } 9 => { // Type 9: Lagrange Interpolation --- Unequal Time Steps - let data = spk_data.nth_data::(idx_in_spk)?; - data.evaluate(epoch, summary)? + let data = spk_data + .nth_data::(idx_in_spk) + .with_context(|_| SPKSnafu { + action: "fetching data for interpolation", + })?; + data.evaluate(epoch, summary) + .with_context(|_| EphemInterpolationSnafu)? } 13 => { // Type 13: Hermite Interpolation --- Unequal Time Steps - let data = spk_data.nth_data::(idx_in_spk)?; - data.evaluate(epoch, summary)? + let data = spk_data + .nth_data::(idx_in_spk) + .with_context(|_| SPKSnafu { + action: "fetching data for interpolation", + })?; + data.evaluate(epoch, summary) + .with_context(|_| EphemInterpolationSnafu)? } _ => todo!("{} is not yet supported", summary.data_type_i), }; - // Convert the units based on the storage units. - let dist_unit_factor = LengthUnit::Kilometer.from_meters() * distance_unit.to_meters(); - let time_unit_factor = TimeUnit::Second.from_seconds() * time_unit.in_seconds(); + Ok((pos_km, vel_km_s, new_frame)) + } + + pub fn translate_to_parent( + &self, + source: Frame, + epoch: Epoch, + ab_corr: Aberration, + ) -> Result { + let (radius_km, velocity_km_s, frame) = + self.translation_parts_to_parent(source, epoch, ab_corr)?; - Ok(( - pos_km * dist_unit_factor, - vel_km_s * dist_unit_factor / time_unit_factor, - acc * dist_unit_factor / time_unit_factor.powi(2), - new_frame, - )) + Ok(CartesianState { + radius_km, + velocity_km_s, + epoch, + frame, + }) } } diff --git a/src/ephemerides/translations.rs b/src/ephemerides/translations.rs index 446ef28a..abde0d5e 100644 --- a/src/ephemerides/translations.rs +++ b/src/ephemerides/translations.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. 
If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -8,22 +8,23 @@ * Documentation: https://nyxspace.com/ */ +use snafu::ResultExt; + +use super::EphemerisError; +use super::UnderlyingPhysicsSnafu; use crate::almanac::Almanac; use crate::astro::Aberration; use crate::hifitime::Epoch; use crate::math::cartesian::CartesianState; use crate::math::units::*; use crate::math::Vector3; -use crate::{ - errors::AniseError, - prelude::{Frame, FrameTrait}, -}; +use crate::prelude::Frame; /// **Limitation:** no translation or rotation may have more than 8 nodes. pub const MAX_TREE_DEPTH: usize = 8; impl<'a> Almanac<'a> { - /// Returns the position vector, velocity vector, and acceleration vector needed to translate the `from_frame` to the `to_frame`. + /// Returns the Cartesian state needed to translate the `from_frame` to the `to_frame`. /// /// **WARNING:** This function only performs the translation and no rotation whatsoever. Use the `transform_from_to` function instead to include rotations. /// @@ -34,9 +35,15 @@ impl<'a> Almanac<'a> { to_frame: Frame, epoch: Epoch, ab_corr: Aberration, - length_unit: LengthUnit, - time_unit: TimeUnit, - ) -> Result { + ) -> Result { + let mut to_frame: Frame = to_frame; + + // If there is no frame info, the user hasn't loaded this frame, but might still want to compute a translation. + if let Ok(to_frame_info) = self.frame_from_uid(to_frame) { + // User has loaded the planetary data for this frame, so let's use that as the to_frame. + to_frame = to_frame_info; + } + if from_frame == to_frame { // Both frames match, return this frame's hash (i.e. no need to go higher up). return Ok(CartesianState::zero(from_frame)); @@ -46,49 +53,37 @@ impl<'a> Almanac<'a> { self.common_ephemeris_path(from_frame, to_frame, epoch)?; // The fwrd variables are the states from the `from frame` to the common node - let (mut pos_fwrd, mut vel_fwrd, mut acc_fwrd, mut frame_fwrd) = - if from_frame.ephem_origin_hash_match(common_node) { - ( - Vector3::zeros(), - Vector3::zeros(), - Vector3::zeros(), - from_frame, - ) + let (mut pos_fwrd, mut vel_fwrd, mut frame_fwrd) = + if from_frame.ephem_origin_id_match(common_node) { + (Vector3::zeros(), Vector3::zeros(), from_frame) } else { - self.translate_to_parent(from_frame, epoch, ab_corr, length_unit, time_unit)? + self.translation_parts_to_parent(from_frame, epoch, ab_corr)? }; // The bwrd variables are the states from the `to frame` back to the common node - let (mut pos_bwrd, mut vel_bwrd, mut acc_bwrd, mut frame_bwrd) = - if to_frame.ephem_origin_hash_match(common_node) { - ( - Vector3::zeros(), - Vector3::zeros(), - Vector3::zeros(), - to_frame, - ) + let (mut pos_bwrd, mut vel_bwrd, mut frame_bwrd) = + if to_frame.ephem_origin_id_match(common_node) { + (Vector3::zeros(), Vector3::zeros(), to_frame) } else { - self.translate_to_parent(to_frame, epoch, ab_corr, length_unit, time_unit)? + self.translation_parts_to_parent(to_frame, epoch, ab_corr)? 
}; for cur_node_hash in path.iter().take(node_count) { - if !frame_fwrd.ephem_origin_hash_match(common_node) { - let (cur_pos_fwrd, cur_vel_fwrd, cur_acc_fwrd, cur_frame_fwrd) = - self.translate_to_parent(frame_fwrd, epoch, ab_corr, length_unit, time_unit)?; + if !frame_fwrd.ephem_origin_id_match(common_node) { + let (cur_pos_fwrd, cur_vel_fwrd, cur_frame_fwrd) = + self.translation_parts_to_parent(frame_fwrd, epoch, ab_corr)?; pos_fwrd += cur_pos_fwrd; vel_fwrd += cur_vel_fwrd; - acc_fwrd += cur_acc_fwrd; frame_fwrd = cur_frame_fwrd; } - if !frame_bwrd.ephem_origin_hash_match(common_node) { - let (cur_pos_bwrd, cur_vel_bwrd, cur_acc_bwrd, cur_frame_bwrd) = - self.translate_to_parent(frame_bwrd, epoch, ab_corr, length_unit, time_unit)?; + if !frame_bwrd.ephem_origin_id_match(common_node) { + let (cur_pos_bwrd, cur_vel_bwrd, cur_frame_bwrd) = + self.translation_parts_to_parent(frame_bwrd, epoch, ab_corr)?; pos_bwrd += cur_pos_bwrd; vel_bwrd += cur_vel_bwrd; - acc_bwrd += cur_acc_bwrd; frame_bwrd = cur_frame_bwrd; } @@ -101,80 +96,19 @@ impl<'a> Almanac<'a> { Ok(CartesianState { radius_km: pos_fwrd - pos_bwrd, velocity_km_s: vel_fwrd - vel_bwrd, - acceleration_km_s2: Some(acc_fwrd - acc_bwrd), epoch, frame: to_frame, }) } - /// Returns the position vector, velocity vector, and acceleration vector needed to translate the `from_frame` to the `to_frame`, where the distance is in km, the velocity in km/s, and the acceleration in km/s^2. - pub fn translate_from_to_km_s( - &self, - from_frame: Frame, - to_frame: Frame, - epoch: Epoch, - ab_corr: Aberration, - ) -> Result { - self.translate_from_to( - from_frame, - to_frame, - epoch, - ab_corr, - LengthUnit::Kilometer, - TimeUnit::Second, - ) - } - - /// Returns the position vector, velocity vector, and acceleration vector needed to translate the `from_frame` to the `to_frame`, where the distance is in m, the velocity in m/s, and the acceleration in m/s^2. - pub fn translate_from_to_m_s( - &self, - from_frame: Frame, - to_frame: Frame, - epoch: Epoch, - ab_corr: Aberration, - ) -> Result { - self.translate_from_to( - from_frame, - to_frame, - epoch, - ab_corr, - LengthUnit::Meter, - TimeUnit::Second, - ) - } - /// Returns the geometric position vector, velocity vector, and acceleration vector needed to translate the `from_frame` to the `to_frame`, where the distance is in km, the velocity in km/s, and the acceleration in km/s^2. - pub fn translate_from_to_km_s_geometric( + pub fn translate_from_to_geometric( &self, from_frame: Frame, to_frame: Frame, epoch: Epoch, - ) -> Result { - self.translate_from_to( - from_frame, - to_frame, - epoch, - Aberration::None, - LengthUnit::Kilometer, - TimeUnit::Second, - ) - } - - /// Returns the geometric position vector, velocity vector, and acceleration vector needed to translate the `from_frame` to the `to_frame`, where the distance is in m, the velocity in m/s, and the acceleration in m/s^2. 
- pub fn translate_from_to_m_s_geometric( - &self, - from_frame: Frame, - to_frame: Frame, - epoch: Epoch, - ) -> Result { - self.translate_from_to( - from_frame, - to_frame, - epoch, - Aberration::None, - LengthUnit::Meter, - TimeUnit::Second, - ) + ) -> Result { + self.translate_from_to(from_frame, to_frame, epoch, Aberration::None) } /// Translates a state with its origin (`to_frame`) and given its units (distance_unit, time_unit), returns that state with respect to the requested frame @@ -191,16 +125,9 @@ impl<'a> Almanac<'a> { ab_corr: Aberration, distance_unit: LengthUnit, time_unit: TimeUnit, - ) -> Result { + ) -> Result { // Compute the frame translation - let frame_state = self.translate_from_to( - from_frame, - to_frame, - epoch, - ab_corr, - distance_unit, - time_unit, - )?; + let frame_state = self.translate_from_to(from_frame, to_frame, epoch, ab_corr)?; let dist_unit_factor = LengthUnit::Kilometer.from_meters() * distance_unit.to_meters(); let time_unit_factor = time_unit.in_seconds(); @@ -208,11 +135,10 @@ impl<'a> Almanac<'a> { let input_state = CartesianState { radius_km: position * dist_unit_factor, velocity_km_s: velocity * dist_unit_factor / time_unit_factor, - acceleration_km_s2: None, epoch, frame: from_frame, }; - input_state + frame_state + (input_state + frame_state).with_context(|_| UnderlyingPhysicsSnafu {}) } } diff --git a/src/errors.rs b/src/errors.rs index 1c1d6fab..5401854f 100644 --- a/src/errors.rs +++ b/src/errors.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -9,73 +9,58 @@ */ use hifitime::Epoch; +use snafu::prelude::*; -use crate::prelude::Frame; +use crate::prelude::FrameUid; use crate::structure::semver::Semver; +use crate::NaifId; use core::convert::From; -use core::fmt; +use der::Error as DerError; use std::io::ErrorKind as IOErrorKind; -#[derive(Clone, PartialEq, Debug)] -pub enum AniseError { - StructureIsFull, +#[derive(Debug, Snafu)] +pub enum InputOutputError { /// Raised for an error in reading or writing the file(s) - IOError(IOErrorKind), + IOError { kind: IOErrorKind }, /// Raised if an IO error occurred but its representation is not simple (and therefore not an std::io::ErrorKind). IOUnknownError, - /// Math error - MathError(MathErrorKind), - /// Raised when requesting the value of a parameter but it does not have any representation (typically the coefficients are an empty array) - ParameterNotSpecified, - /// The byte stream is missing data that is required to parse. - MalformedData(usize), - /// If the NAIF file cannot be read or isn't supported - DAFParserError(String), - InvalidTimeSystem, - /// Raised if there is some kind of error with the underlying data, e.g. invalid checksum, or NaN/Inf values when that is not acceptable. - IntegrityError(IntegrityErrorKind), - /// Raised if the item sought after is not found in the context - ItemNotFound, - /// Raised when requesting the interpolation for data that is not available in this spline. 
- NoInterpolationData, - /// If this is raised, please report a bug - InternalError(InternalErrorKind), - /// Raised to prevent overwriting an existing file - FileExists, - /// Raised if a transformation is requested but the frames have no common origin - DisjointFrames { - from_frame: Frame, - to_frame: Frame, +} + +#[derive(Debug, Snafu, PartialEq)] +#[snafu(visibility(pub(crate)))] +pub enum DecodingError { + #[snafu(display( + "could not decode {dataset} data -- need at least {need} doubles but found {got}" + ))] + TooFewDoubles { + dataset: &'static str, + got: usize, + need: usize, }, - /// Raised if the ephemeris or orientation is deeper to the context origin than this library supports - MaxTreeDepth, - /// Raised if there is no interpolation data for the requested epoch, i.e. ephemeris/orientation starts after or ends before the requested epoch - MissingInterpolationData(Epoch), - /// Raised if a computation is physically wrong - PhysicsError(PhysicsErrorKind), - IncompatibleVersion { - got: Semver, - exp: Semver, + #[snafu(display("bytes between indexes {start} and {end} could not be read, array contains {size} bytes (data malformed?)"))] + InaccessibleBytes { + start: usize, + end: usize, + size: usize, }, - DecodingError(der::Error), - IncompatibleRotation { - from: i32, - to: i32, + #[snafu(display("integrity error during decoding: {source}"))] + Integrity { + #[snafu(backtrace)] + source: IntegrityError, }, + #[snafu(display("decoding DER failed: {err}"))] + DecodingDer { err: DerError }, + #[snafu(display("somehow casting the data failed"))] + Casting, + #[snafu(display("could not load ANISE data version {got}, expected {exp}"))] + AniseVersion { got: Semver, exp: Semver }, + #[snafu(display("data could not be parsed as {kind} despite ANISE version matching (should be loaded as another type?)"))] + Obscure { kind: &'static str }, } -#[derive(Copy, Clone, PartialEq, Eq, Debug)] -pub enum InternalErrorKind { - /// Appending to the lookup table failed - LUTAppendFailure, - /// May happen if the interpolation scheme is not yet supported - InterpolationNotSupported, - /// Some generic internal error, check the logs of the program and file a bug report - Generic, -} - -#[derive(Copy, Clone, PartialEq, Eq, Debug)] -pub enum IntegrityErrorKind { +#[derive(Copy, Clone, PartialEq, Debug, Snafu)] +#[snafu(visibility(pub(crate)))] +pub enum IntegrityError { /// Data checksum differs from expected checksum ChecksumInvalid { expected: u32, computed: u32 }, /// Data between two ephemerides expected to be identical mismatch (may happen on merger of files) @@ -85,91 +70,84 @@ pub enum IntegrityErrorKind { /// The lookup table is broken somehow LookupTable, /// Raised if a transformation is requested but the frames have no common origin - DisjointRoots { from_frame: Frame, to_frame: Frame }, - /// Raised if some f64 data is NaN, infinity, or negative infinity. 
- SubNormal, + DisjointRoots { + from_frame: FrameUid, + to_frame: FrameUid, + }, + #[snafu(display( + "data for {variable} in {dataset} decoded as subnormal double (data malformed?)" + ))] + SubNormal { + dataset: &'static str, + variable: &'static str, + }, + #[snafu(display("data for {variable}={value} in {dataset} is invalid {reason}"))] + InvalidValue { + dataset: &'static str, + variable: &'static str, + value: f64, + reason: &'static str, + }, } -#[derive(Clone, PartialEq, Eq, Debug)] -pub enum MathErrorKind { - DivisionByZero, - StateEpochsDiffer, - StateFramesDiffer, +#[derive(Clone, PartialEq, Eq, Debug, Snafu)] +#[snafu(visibility(pub(crate)))] +pub enum MathError { + #[snafu(display("prevented a division by zero when {action}"))] + DivisionByZero { action: &'static str }, + #[snafu(display("could"))] InvalidInterpolationData, - PolynomialOrderError(usize), -} - -#[derive(Copy, Clone, PartialEq, Debug)] -pub enum PhysicsErrorKind { - /// ANISE does not support parabolic orbits because these are not physically real. - ParabolicOrbit, - /// True anomaly of the provided hyperbolic orbit is physically impossible - InvalidHyperbolicTrueAnomaly(f64), - /// Some computation led to a value being infinite, check the error logs - InfiniteValue, -} - -impl From for AniseError { - fn from(e: IOErrorKind) -> Self { - Self::IOError(e) - } -} - -impl From for AniseError { - fn from(e: InternalErrorKind) -> Self { - Self::InternalError(e) - } } -impl From for AniseError { - fn from(e: MathErrorKind) -> Self { - Self::MathError(e) - } +#[derive(Debug, Snafu, PartialEq)] +#[snafu(visibility(pub(crate)))] +pub enum PhysicsError { + /// Somehow you've entered code that should not be reachable, please file a bug. + Unreachable, + #[snafu(display("epochs {epoch1} and {epoch2} differ while {action}"))] + EpochMismatch { + action: &'static str, + epoch1: Epoch, + epoch2: Epoch, + }, + #[snafu(display("frames {frame1} and {frame2} differ while {action}"))] + FrameMismatch { + action: &'static str, + frame1: FrameUid, + frame2: FrameUid, + }, + #[snafu(display("origins {from1} and {from2} differ while {action}"))] + OriginMismatch { + action: &'static str, + from1: NaifId, + from2: NaifId, + }, + #[snafu(display("{action} requires the time derivative of the DCM but it is not set"))] + DCMMissingDerivative { action: &'static str }, + #[snafu(display("{action} requires the frame {frame} to have {data} defined"))] + MissingFrameData { + action: &'static str, + data: &'static str, + frame: FrameUid, + }, + #[snafu(display("parabolic orbits are physically impossible and the eccentricity calculated to be within {limit:e} of 1.0"))] + ParabolicEccentricity { limit: f64 }, + #[snafu(display("parabolic orbits are physically impossible and the semilatus rectum (semi-parameter) calculated to be {p}"))] + ParabolicSemiParam { p: f64 }, + #[snafu(display("hyperbolic true anomaly is physically impossible: {ta_deg} deg"))] + HyperbolicTrueAnomaly { ta_deg: f64 }, + #[snafu(display("infinite value encountered when {action}"))] + InfiniteValue { action: &'static str }, + #[snafu(display("{source}"))] + AppliedMath { source: MathError }, + #[snafu(display("invalid radius {action}"))] + RadiusError { action: &'static str }, + #[snafu(display("invalid velocity {action}"))] + VelocityError { action: &'static str }, } -impl fmt::Display for AniseError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> { - match self { - Self::StructureIsFull => write!(f, "ANISE error: attempted to load more data but no 
more room was available"), - Self::IOError(e) => write!(f, "ANISE error: IOError: {e:?}"), - Self::IOUnknownError => write!(f, "ANISE error: IOUnknownError"), - Self::MathError(e) => write!(f, "ANISE error: MathError: {e:?}"), - Self::ParameterNotSpecified => write!(f, "ANISE error: ParameterNotSpecified"), - Self::MalformedData(byte) => write!(f, "ANISE error: Malformed data: could not read up to byte {byte}."), - Self::DAFParserError(reason) => { - write!(f, "ANISE error: invalid NAIF DAF file: {}", reason) - } - Self::InvalidTimeSystem => write!(f, "ANISE error: invalid time system"), - Self::IntegrityError(e) => write!(f, "ANISE error: data integrity error: {e:?}"), - Self::ItemNotFound => write!(f, "ANISE error: requested item not found in context"), - Self::InternalError(e) => { - write!(f, "ANISE internal error: {e:?} -- please report a bug") - } - Self::NoInterpolationData => write!( - f, - "ANISE error: No interpolation for the requested component" - ), - Self::FileExists => write!( - f, - "ANISE error: operation aborted to prevent overwriting an existing file" - ), - Self::DisjointFrames { from_frame: from, to_frame: to } => write!( - f, - "ANISE error: frame {} and {} do not share a common origin", - to, from - ), - Self::MaxTreeDepth => write!( - f, - "ANISE error: the ephemeris or orientation is deeper to the context origin than this library supports" - ), - Self::MissingInterpolationData(e) => write!( - f, - "ANISE error: No interpolation as epoch {e:e}" - ), - Self::PhysicsError(e) => write!(f, "ANISE error: Physics error: {e:?}"), - _ => write!(f, "ANISE error: {self:?}") - } +impl From for InputOutputError { + fn from(kind: IOErrorKind) -> Self { + Self::IOError { kind } } } - -impl std::error::Error for AniseError {} diff --git a/src/frames/celestial_frame.rs b/src/frames/celestial_frame.rs deleted file mode 100644 index 45661043..00000000 --- a/src/frames/celestial_frame.rs +++ /dev/null @@ -1,79 +0,0 @@ -/* - * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) - * This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at https://mozilla.org/MPL/2.0/. - * - * Documentation: https://nyxspace.com/ - */ - -use crate::{almanac::Almanac, prelude::AniseError, NaifId}; - -use super::{Frame, FrameTrait}; -use core::fmt; - -/// Defines a Celestial Frame kind, which is a Frame that also defines a standard gravitational parameter -pub trait CelestialFrameTrait: FrameTrait { - /// Returns the standard gravitational parameter of this frame (consider switching to UOM for this) - fn mu_km3_s2(&self) -> f64; -} - -/// A CelestialFrame is a frame whose equatorial and semi major radii are defined. -#[derive(Copy, Clone, Debug, PartialEq)] -pub struct CelestialFrame { - pub frame: Frame, - pub mu_km3_s2: f64, -} - -impl FrameTrait for CelestialFrame { - fn ephemeris_hash(&self) -> NaifId { - self.frame.ephemeris_hash() - } - - fn orientation_hash(&self) -> NaifId { - self.frame.orientation_hash() - } -} - -impl CelestialFrameTrait for CelestialFrame { - fn mu_km3_s2(&self) -> f64 { - self.mu_km3_s2 - } -} - -impl fmt::Display for CelestialFrame { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> { - write!(f, "{} (μ = {} km3/s)", self.frame, self.mu_km3_s2()) - } -} - -impl<'a> Almanac<'a> { - /// Tries to find the celestial frame data given the ephemeris center name and the orientation name. 
- /// # Note - /// The ephemeris name MUST match the name of the planetary constant. - /// To load the planetary constants with another name, use `celestial_frame_from` - pub fn celestial_frame( - &self, - ephemeris_name: &'a str, - orientation_name: &'a str, - ) -> Result { - self.celestial_frame_from(ephemeris_name, orientation_name, ephemeris_name) - } - - /// Tries to find the celestial frame data given the ephemeris center name, the orientation name, and the name of the planetary constants - pub fn celestial_frame_from( - &self, - _ephemeris_name: &'a str, - _orientation_name: &'a str, - _planetary_constants_name: &'a str, - ) -> Result { - todo!() - // let constants = self.planetary_constants_from_name(planetary_constants_name)?; - - // Ok(CelestialFrame { - // frame: Frame::from_ephemeris_orientation_names(ephemeris_name, orientation_name), - // mu_km3_s2: constants.mu_km3_s2, - // }) - } -} diff --git a/src/frames/frame.rs b/src/frames/frame.rs index 7f8b3fc3..c34c02a0 100644 --- a/src/frames/frame.rs +++ b/src/frames/frame.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -13,88 +13,120 @@ use core::fmt::Debug; use crate::constants::celestial_objects::celestial_name_from_id; use crate::constants::orientations::{orientation_name_from_id, J2000}; +use crate::errors::PhysicsError; +use crate::prelude::FrameUid; +use crate::structure::planetocentric::ellipsoid::Ellipsoid; use crate::NaifId; -/// Defines a Frame kind, allows for compile time checking of operations. -pub trait FrameTrait: Copy + Debug + PartialEq + Send + Sync { - /// Returns the ephemeris hash of this frame. - /// TODO: Rename away from `hash` these are no longer hashes but NaifIDs - fn ephemeris_hash(&self) -> NaifId; - /// Returns the orientation hash of this frame. - fn orientation_hash(&self) -> NaifId; - /// Returns true if the ephemeris origin is equal to the provided hash - fn ephem_origin_hash_match(&self, other_hash: NaifId) -> bool { - self.ephemeris_hash() == other_hash - } - /// Returns true if the orientation origin is equal to the provided hash - fn orient_origin_hash_match(&self, other_hash: NaifId) -> bool { - self.orientation_hash() == other_hash - } - /// Returns true if the ephemeris origin is equal to the provided frame - fn ephem_origin_match(&self, other: Self) -> bool { - self.ephem_origin_hash_match(other.ephemeris_hash()) - } - /// Returns true if the orientation origin is equal to the provided frame - fn orient_origin_match(&self, other: Self) -> bool { - self.orient_origin_hash_match(other.orientation_hash()) - } -} - /// A Frame uniquely defined by its ephemeris center and orientation. Refer to FrameDetail for frames combined with parameters. 
-#[derive(Copy, Clone, Debug, PartialEq, Eq)] +#[derive(Copy, Clone, Debug, PartialEq)] pub struct Frame { pub ephemeris_id: NaifId, pub orientation_id: NaifId, + /// Gravity parameter of this frame, only defined on celestial frames + pub mu_km3_s2: Option, + /// Shape of the geoid of this frame, only defined on geodetic frames + pub shape: Option, } -impl FrameTrait for Frame { - fn ephemeris_hash(&self) -> NaifId { - self.ephemeris_id +impl Frame { + /// Constructs a new frame given its ephemeris and orientations IDs, without defining anything else (so this is not a valid celestial frame, although the data could be populated later). + pub const fn from_ephem_orient(ephemeris_id: NaifId, orientation_id: NaifId) -> Self { + Self { + ephemeris_id, + orientation_id, + mu_km3_s2: None, + shape: None, + } } - fn orientation_hash(&self) -> NaifId { - self.orientation_id + pub const fn from_ephem_j2000(ephemeris_id: NaifId) -> Self { + Self::from_ephem_orient(ephemeris_id, J2000) } -} -impl Frame { - /// Constructs a new frame given its ephemeris and orientations hashes. - pub const fn from_ephem_orient(ephemeris_hash: NaifId, orientation_hash: NaifId) -> Self { - Self { - ephemeris_id: ephemeris_hash, - orientation_id: orientation_hash, - } + /// Returns a copy of this Frame whose ephemeris ID is set to the provided ID + pub const fn with_ephem(&self, new_ephem_id: NaifId) -> Self { + let mut me = *self; + me.ephemeris_id = new_ephem_id; + me } - pub fn from_ephemeris_orientation_names<'a>( - _ephemeris_name: &'a str, - _orientation_name: &'a str, - ) -> Self { - todo!() - // Self { - // ephemeris_id: hash(ephemeris_name.as_bytes()), - // orientation_id: hash(orientation_name.as_bytes()), - // } + /// Returns a copy of this Frame whose orientation ID is set to the provided ID + pub const fn with_orient(&self, new_orient_id: NaifId) -> Self { + let mut me = *self; + me.orientation_id = new_orient_id; + me } - pub const fn from_ephem_j2000(ephemeris_hash: NaifId) -> Self { - Self::from_ephem_orient(ephemeris_hash, J2000) + /// Returns whether this is a celestial frame + pub const fn is_celestial(&self) -> bool { + self.mu_km3_s2.is_some() } - /// Returns a copy of this Frame whose ephemeris hash is set to the provided hash - pub const fn with_ephem(&self, new_ephem_hash: NaifId) -> Self { - Self { - ephemeris_id: new_ephem_hash, - orientation_id: self.orientation_id, - } + /// Returns whether this is a geodetic frame + pub const fn is_geodetic(&self) -> bool { + self.mu_km3_s2.is_some() && self.shape.is_some() } - /// Returns a copy of this Frame whose orientation hash is set to the provided hash - pub const fn with_orient(&self, new_orient_hash: NaifId) -> Self { - Self { - ephemeris_id: self.ephemeris_id, - orientation_id: new_orient_hash, - } + /// Returns true if the ephemeris origin is equal to the provided ID + pub fn ephem_origin_id_match(&self, other_id: NaifId) -> bool { + self.ephemeris_id == other_id + } + /// Returns true if the orientation origin is equal to the provided ID + pub fn orient_origin_id_match(&self, other_id: NaifId) -> bool { + self.orientation_id == other_id + } + /// Returns true if the ephemeris origin is equal to the provided frame + pub fn ephem_origin_match(&self, other: Self) -> bool { + self.ephem_origin_id_match(other.ephemeris_id) + } + /// Returns true if the orientation origin is equal to the provided frame + pub fn orient_origin_match(&self, other: Self) -> bool { + self.orient_origin_id_match(other.orientation_id) + } + + /// Returns the gravitational 
parameter of this frame, if defined + pub fn mu_km3_s2(&self) -> Result<f64, PhysicsError> { + self.mu_km3_s2.ok_or(PhysicsError::MissingFrameData { + action: "retrieving the gravity parameter", + data: "mu_km3_s2", + frame: self.into(), + }) + } + + /// Returns the mean equatorial radius in km, if defined + pub fn mean_equatorial_radius_km(&self) -> Result<f64, PhysicsError> { + Ok(self + .shape + .ok_or(PhysicsError::MissingFrameData { + action: "retrieving mean equatorial radius", + data: "shape", + frame: self.into(), + })? + .mean_equatorial_radius_km()) + } + + /// Returns the semi major radius of the tri-axial ellipsoid shape of this frame, if defined + pub fn semi_major_radius_km(&self) -> Result<f64, PhysicsError> { + Ok(self + .shape + .ok_or(PhysicsError::MissingFrameData { + action: "retrieving semi major axis radius", + data: "shape", + frame: self.into(), + })? + .semi_major_equatorial_radius_km) + } + + pub fn flattening(&self) -> Result<f64, PhysicsError> { + Ok(self + .shape + .ok_or(PhysicsError::MissingFrameData { + action: "retrieving flattening ratio", + data: "shape", + frame: self.into(), + })? + .flattening()) + } } @@ -110,7 +142,18 @@ impl fmt::Display for Frame { None => format!("orientation {}", self.orientation_id), }; - write!(f, "{body_name} {orientation_name}") + write!(f, "{body_name} {orientation_name}")?; + if self.is_geodetic() { + write!( + f, + " (μ = {} km3/s^2, {})", + self.mu_km3_s2.unwrap(), + self.shape.unwrap() + )?; + } else if self.is_celestial() { + write!(f, " (μ = {} km3/s^2)", self.mu_km3_s2.unwrap())?; + } + Ok(()) } } @@ -136,3 +179,24 @@ impl fmt::Octal for Frame { write!(f, "{orientation_name}") } } + +impl fmt::LowerHex for Frame { + /// Only prints the UID + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> { + let uid: FrameUid = self.into(); + write!(f, "{uid}") + } +} + +#[cfg(test)] +mod frame_ut { + use crate::constants::frames::EME2000; + + #[test] + fn format_frame() { + assert_eq!(format!("{EME2000}"), "Earth J2000"); + assert_eq!(format!("{EME2000:x}"), "Earth J2000"); + assert_eq!(format!("{EME2000:o}"), "J2000"); + assert_eq!(format!("{EME2000:e}"), "Earth"); + } +} diff --git a/src/frames/frameuid.rs b/src/frames/frameuid.rs new file mode 100644 index 00000000..878160a4 --- /dev/null +++ b/src/frames/frameuid.rs @@ -0,0 +1,76 @@ +/* + * ANISE Toolkit + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. + * + * Documentation: https://nyxspace.com/ + */ + +use crate::{ + constants::{ + celestial_objects::celestial_name_from_id, orientations::orientation_name_from_id, + }, + NaifId, +}; +use core::fmt; + +pub use super::Frame; + +/// A unique frame reference that only contains enough information to build the actual Frame object. +/// It cannot be used for any computations; it is only meant to be used in structures that do not need the full frame definition, such as errors.
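As a rough usage sketch of the new optional Frame fields and the fallible accessor introduced above (the gravity parameter value below is an illustrative placeholder, not a constant from this crate):

use anise::prelude::Frame;

// Build a purely geometric Earth J2000 frame, then promote it to a celestial
// frame by filling in the optional gravity parameter.
fn celestial_frame_sketch() -> Frame {
    let mut earth_j2k = Frame::from_ephem_j2000(399);
    assert!(!earth_j2k.is_celestial());
    earth_j2k.mu_km3_s2 = Some(398_600.435_436); // illustrative GM in km^3/s^2
    assert!(earth_j2k.is_celestial());
    // The accessor returns a PhysicsError::MissingFrameData if the data is absent.
    assert!(earth_j2k.mu_km3_s2().is_ok());
    earth_j2k
}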
+/// +/// # Usage note +/// You should almost always prefer Frame over FrameUid unless you only need the ephemeris and orientation IDs (for example when reporting an error). +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct FrameUid { + pub ephemeris_id: NaifId, + pub orientation_id: NaifId, +} + +impl From<Frame> for FrameUid { + fn from(frame: Frame) -> Self { + Self { + ephemeris_id: frame.ephemeris_id, + orientation_id: frame.orientation_id, + } + } +} + +impl From<&Frame> for FrameUid { + fn from(frame: &Frame) -> Self { + Self { + ephemeris_id: frame.ephemeris_id, + orientation_id: frame.orientation_id, + } + } +} + +impl From<FrameUid> for Frame { + fn from(uid: FrameUid) -> Self { + Self::from_ephem_orient(uid.ephemeris_id, uid.orientation_id) + } +} + +impl From<&FrameUid> for Frame { + fn from(uid: &FrameUid) -> Self { + Self::from_ephem_orient(uid.ephemeris_id, uid.orientation_id) + } +} + +impl fmt::Display for FrameUid { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> { + let body_name = match celestial_name_from_id(self.ephemeris_id) { + Some(name) => name.to_string(), + None => format!("body {}", self.ephemeris_id), + }; + + let orientation_name = match orientation_name_from_id(self.orientation_id) { + Some(name) => name.to_string(), + None => format!("orientation {}", self.orientation_id), + }; + + write!(f, "{body_name} {orientation_name}") + } +} diff --git a/src/frames/geodetic_frame.rs b/src/frames/geodetic_frame.rs deleted file mode 100644 index 835e8f8d..00000000 --- a/src/frames/geodetic_frame.rs +++ /dev/null @@ -1,126 +0,0 @@ -/* - * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) - * This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at https://mozilla.org/MPL/2.0/. - * - * Documentation: https://nyxspace.com/ - */ - -use super::{celestial_frame::CelestialFrame, CelestialFrameTrait, Frame, FrameTrait}; -use crate::{ - almanac::Almanac, prelude::AniseError, structure::planetocentric::ellipsoid::Ellipsoid, NaifId, -}; -use core::fmt; - -/// Defines a Celestial Frame kind, which is a Frame that also defines a standard gravitational parameter -pub trait GeodeticFrameTrait: CelestialFrameTrait { - /// Equatorial radius in kilometers - fn mean_equatorial_radius_km(&self) -> f64; - /// Semi major radius in kilometers - fn semi_major_radius_km(&self) -> f64; - /// Flattening coefficient (unit less) - fn flattening(&self) -> f64; - /// Returns the average angular velocity of this frame - fn angular_velocity_deg_s(&self) -> f64; -} - -/// A GeodeticFrame is a Celestial Frame whose equatorial and semi major radii are defined.
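A short sketch of the intended FrameUid round trip using only the From impls above; note that rebuilding a Frame from its UID restores the IDs only, so any mu or shape data is dropped:

use anise::prelude::{Frame, FrameUid};

fn uid_roundtrip_sketch(frame: Frame) {
    // Borrowing conversion, convenient when constructing error variants.
    let uid: FrameUid = (&frame).into();
    println!("operation failed for {uid}");
    // Going back to a Frame yields a bare frame: mu_km3_s2 and shape are None.
    let bare: Frame = uid.into();
    assert!(!bare.is_celestial());
}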
-#[derive(Copy, Clone, Debug, PartialEq)] -pub struct GeodeticFrame { - pub celestial_frame: CelestialFrame, - pub shape: Ellipsoid, - pub angular_velocity_deg: f64, -} - -impl FrameTrait for GeodeticFrame { - fn ephemeris_hash(&self) -> NaifId { - self.celestial_frame.ephemeris_hash() - } - - fn orientation_hash(&self) -> NaifId { - self.celestial_frame.orientation_hash() - } -} - -impl CelestialFrameTrait for GeodeticFrame { - fn mu_km3_s2(&self) -> f64 { - self.celestial_frame.mu_km3_s2() - } -} - -impl GeodeticFrameTrait for GeodeticFrame { - fn mean_equatorial_radius_km(&self) -> f64 { - self.shape.mean_equatorial_radius_km() - } - - fn semi_major_radius_km(&self) -> f64 { - self.shape.semi_major_equatorial_radius_km - } - - fn flattening(&self) -> f64 { - self.shape.flattening() - } - - fn angular_velocity_deg_s(&self) -> f64 { - self.angular_velocity_deg - } -} - -impl fmt::Display for GeodeticFrame { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> { - write!(f, "{}", self.celestial_frame.frame)?; - write!(f, " (μ = {} km3/s, {})", self.mu_km3_s2(), self.shape) - } -} - -#[allow(clippy::from_over_into)] -impl Into for GeodeticFrame { - /// Lossy operation to convert FrameDetail into a Frame. - /// - /// This will cause the LOSS of the constants stored in the frame detail. - fn into(self) -> Frame { - self.celestial_frame.frame - } -} - -impl<'a> Almanac<'a> { - /// Tries to find the geodetic frame data given the ephemeris center name and the orientation name. - /// # Note - /// The ephemeris name MUST match the name of the planetary constant. - /// To load the planetary constants with another name, use `geodetic_frame_from` - pub fn geodetic_frame( - &self, - ephemeris_name: &'a str, - orientation_name: &'a str, - ) -> Result { - self.geodetic_frame_from(ephemeris_name, orientation_name, ephemeris_name) - } - - /// Tries to find the geodetic frame data given the ephemeris center name, the orientation name, and the name of the planetary constants - pub fn geodetic_frame_from( - &self, - _ephemeris_name: &'a str, - _orientation_name: &'a str, - _planetary_constants_name: &'a str, - ) -> Result { - todo!() - // let constants = self.planetary_constants_from_name(planetary_constants_name)?; - - // if constants.shape.is_none() { - // error!("no shape data associated with {planetary_constants_name}"); - // return Err(AniseError::ParameterNotSpecified); - // } - - // // TODO: Figure out how to specify / where to find the angular velocity. And maybe it shouldn't exist! - // Ok(GeodeticFrame { - // celestial_frame: CelestialFrame { - // frame: Frame::from_ephemeris_orientation_names(ephemeris_name, orientation_name), - // mu_km3_s2: constants.mu_km3_s2, - // }, - // shape: constants.shape.unwrap(), - // angular_velocity_deg: 0.0, - // }) - } -} diff --git a/src/frames/mod.rs b/src/frames/mod.rs index 976b37a8..498d93f3 100644 --- a/src/frames/mod.rs +++ b/src/frames/mod.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
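The trait methods deleted here now live directly on Frame (see the accessors added above); a minimal sketch of the replacement API, assuming the caller already holds a frame whose shape data has been populated:

use anise::errors::PhysicsError;
use anise::prelude::Frame;

// Each accessor returns a MissingFrameData error if the frame carries no shape.
fn radii_sketch(frame: Frame) -> Result<(f64, f64, f64), PhysicsError> {
    Ok((
        frame.mean_equatorial_radius_km()?,
        frame.semi_major_radius_km()?,
        frame.flattening()?,
    ))
}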
@@ -8,10 +8,8 @@ * Documentation: https://nyxspace.com/ */ -pub mod celestial_frame; -pub mod frame; -pub mod geodetic_frame; +mod frame; +mod frameuid; -pub use celestial_frame::{CelestialFrame, CelestialFrameTrait}; -pub use frame::{Frame, FrameTrait}; -pub use geodetic_frame::{GeodeticFrame, GeodeticFrameTrait}; +pub use frame::Frame; +pub use frameuid::FrameUid; diff --git a/src/lib.rs b/src/lib.rs index 6eb45cec..2675c7d9 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -21,6 +21,7 @@ pub mod errors; pub mod frames; pub mod math; pub mod naif; +pub mod orientations; pub mod structure; /// Re-export of hifitime @@ -31,7 +32,7 @@ pub mod time { pub mod prelude { pub use crate::almanac::Almanac; pub use crate::astro::Aberration; - pub use crate::errors::AniseError; + pub use crate::errors::InputOutputError; pub use crate::frames::*; pub use crate::math::units::*; pub use crate::naif::daf::NAIFSummaryRecord; @@ -51,12 +52,12 @@ pub(crate) type NaifId = i32; macro_rules! file2heap { ($filename:tt) => { match File::open($filename) { - Err(e) => Err(AniseError::IOError(e.kind())), + Err(e) => Err(InputOutputError::IOError { kind: e.kind() }), Ok(file) => unsafe { use bytes::Bytes; use memmap2::MmapOptions; match MmapOptions::new().map(&file) { - Err(_) => Err(AniseError::IOUnknownError), + Err(_) => Err(InputOutputError::IOUnknownError), Ok(mmap) => { let bytes = Bytes::copy_from_slice(&mmap); Ok(bytes) @@ -72,11 +73,11 @@ macro_rules! file2heap { macro_rules! file_mmap { ($filename:tt) => { match File::open($filename) { - Err(e) => Err(AniseError::IOError(e.kind())), + Err(e) => Err(InputOutputError::IOError { kind: e.kind() }), Ok(file) => unsafe { use memmap2::MmapOptions; match MmapOptions::new().map(&file) { - Err(_) => Err(AniseError::IOUnknownError), + Err(_) => Err(InputOutputError::IOUnknownError), Ok(mmap) => Ok(mmap), } }, diff --git a/src/math/angles.rs b/src/math/angles.rs index eb7b08cb..a9d01859 100644 --- a/src/math/angles.rs +++ b/src/math/angles.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. diff --git a/src/math/cartesian.rs b/src/math/cartesian.rs index 2cceaaf8..d37f56ac 100644 --- a/src/math/cartesian.rs +++ b/src/math/cartesian.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
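With the prelude now exporting InputOutputError instead of AniseError, a caller of the file2heap! macro matches on the structured variants. A sketch as used from this crate's benches or tests (the macro body also expects File, bytes, and memmap2 to be available at the call site, as in the benches):

use anise::file2heap;
use anise::prelude::InputOutputError;
use std::fs::File;

fn load_sketch() {
    match file2heap!("data/de440s.bsp") {
        Ok(bytes) => println!("loaded {} bytes", bytes.len()),
        Err(InputOutputError::IOError { kind }) => eprintln!("I/O error: {kind:?}"),
        Err(_) => eprintln!("unknown I/O error"),
    }
}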
@@ -8,58 +8,56 @@ * Documentation: https://nyxspace.com/ */ -use core::ops::Add; - use super::{perpv, Vector3}; -use crate::prelude::{AniseError, Frame, FrameTrait}; +use crate::{ + astro::PhysicsResult, + errors::{EpochMismatchSnafu, FrameMismatchSnafu, PhysicsError}, + prelude::Frame, +}; +use core::fmt; +use core::ops::Add; use hifitime::Epoch; use nalgebra::Vector6; +use snafu::ensure; -/// Defines a Cartesian state in a given frame at a given epoch in a given time scale. +/// Defines a Cartesian state in a given frame at a given epoch in a given time scale. Radius data is expressed in kilometers. Velocity data is expressed in kilometers per second. +/// Regardless of the constructor used, this struct stores all the state information in Cartesian coordinates as these are always non singular. /// /// Unless noted otherwise, algorithms are from GMAT 2016a [StateConversionUtil.cpp](https://github.com/ChristopherRabotin/GMAT/blob/37201a6290e7f7b941bc98ee973a527a5857104b/src/base/util/StateConversionUtil.cpp). -/// Regardless of the constructor used, this struct stores all the state information in Cartesian coordinates -/// as these are always non singular. -/// _Note:_ although not yet supported, this struct may change once True of Date or other nutation frames -/// are added to the toolkit. #[derive(Copy, Clone, Debug)] -pub struct Cartesian { +pub struct CartesianState { /// Position radius in kilometers pub radius_km: Vector3, /// Velocity in kilometers per second pub velocity_km_s: Vector3, - /// Acceleration in kilometers per second squared - pub acceleration_km_s2: Option, /// Epoch with time scale at which this is valid. pub epoch: Epoch, /// Frame in which this Cartesian state lives. - pub frame: F, + pub frame: Frame, } -pub type CartesianState = Cartesian; - -impl Cartesian { - pub fn zero(frame: F) -> Self { +impl CartesianState { + /// Builds a state of zero radius and velocity at zero seconds TDB (01 Jan 2000, midnight TDB) in the provided frame. + pub fn zero(frame: Frame) -> Self { Self { radius_km: Vector3::zeros(), velocity_km_s: Vector3::zeros(), - acceleration_km_s2: None, epoch: Epoch::from_tdb_seconds(0.0), frame, } } - pub fn zero_as_epoch(epoch: Epoch, frame: F) -> Self { + /// Builds a state of zero radius and velocity at the provided epoch in the provided frame. + pub fn zero_at_epoch(epoch: Epoch, frame: Frame) -> Self { Self { radius_km: Vector3::zeros(), velocity_km_s: Vector3::zeros(), - acceleration_km_s2: None, epoch, frame, } } - /// Creates a new Cartesian state in the provided frame at the provided Epoch, and does not set its acceleration. + /// Creates a new Cartesian state in the provided frame at the provided Epoch. /// /// **Units:** km, km, km, km/s, km/s, km/s #[allow(clippy::too_many_arguments)] @@ -71,12 +69,11 @@ impl Cartesian { vy_km_s: f64, vz_km_s: f64, epoch: Epoch, - frame: F, + frame: Frame, ) -> Self { Self { radius_km: Vector3::new(x_km, y_km, z_km), velocity_km_s: Vector3::new(vx_km_s, vy_km_s, vz_km_s), - acceleration_km_s2: None, epoch, frame, } @@ -85,7 +82,7 @@ impl Cartesian { /// Creates a new Cartesian in the provided frame at the provided Epoch in time with 0.0 velocity. /// /// **Units:** km, km, km - pub fn from_position(x_km: f64, y_km: f64, z_km: f64, epoch: Epoch, frame: F) -> Self { + pub fn from_position(x_km: f64, y_km: f64, z_km: f64, epoch: Epoch, frame: Frame) -> Self { Self::new(x_km, y_km, z_km, 0.0, 0.0, 0.0, epoch, frame) } @@ -95,7 +92,7 @@ impl Cartesian { /// and as such it has the same unit requirements. 
/// /// **Units:** position data must be in kilometers, velocity data must be in kilometers per second. - pub fn from_cartesian_pos_vel(pos_vel: Vector6, epoch: Epoch, frame: F) -> Self { + pub fn from_cartesian_pos_vel(pos_vel: Vector6, epoch: Epoch, frame: Frame) -> Self { Self::new( pos_vel[0], pos_vel[1], pos_vel[2], pos_vel[3], pos_vel[4], pos_vel[5], epoch, frame, ) @@ -111,11 +108,6 @@ impl Cartesian { self.velocity_km_s.norm() } - /// Returns the magnitude of the acceleration vector in km/s^2 - pub fn amag_km_s2(&self) -> Option { - self.acceleration_km_s2.map(|accel| accel.norm()) - } - /// Returns a copy of the state with a new radius pub fn with_radius_km(self, new_radius_km: Vector3) -> Self { let mut me = self; @@ -142,14 +134,18 @@ impl Cartesian { ) } - /// Returns the distance in kilometers between this state and another state. - /// Will **panic** is the frames are different - pub fn distance_to(&self, other: &Self) -> f64 { - assert_eq!( - self.frame, other.frame, - "cannot compute the distance between two states in different frames" + /// Returns the distance in kilometers between this state and another state, if both frame match (epoch does not need to match). + pub fn distance_to(&self, other: &Self) -> PhysicsResult { + ensure!( + self.frame == other.frame, + FrameMismatchSnafu { + action: "translating states", + frame1: self.frame, + frame2: other.frame + } ); - self.distance_to_point_km(&other.radius_km) + + Ok(self.distance_to_point_km(&other.radius_km)) } /// Returns the distance in kilometers between this state and a point assumed to be in the same frame. @@ -180,42 +176,39 @@ impl Cartesian { } } -impl Add for Cartesian { - type Output = Result, AniseError>; +impl Add for CartesianState { + type Output = Result; /// Adds one state to another. This will return an error if the epochs or frames are different. - fn add(self, other: Cartesian) -> Self::Output { - if self.epoch != other.epoch { - return Err(AniseError::MathError( - crate::errors::MathErrorKind::StateEpochsDiffer, - )); - } else if self.frame != other.frame { - return Err(AniseError::MathError( - crate::errors::MathErrorKind::StateFramesDiffer, - )); - } + fn add(self, other: CartesianState) -> Self::Output { + ensure!( + self.epoch == other.epoch, + EpochMismatchSnafu { + action: "translating states", + epoch1: self.epoch, + epoch2: other.epoch + } + ); - Ok(Cartesian:: { + ensure!( + self.frame == other.frame, + FrameMismatchSnafu { + action: "translating states", + frame1: self.frame, + frame2: other.frame + } + ); + + Ok(CartesianState { radius_km: self.radius_km + other.radius_km, velocity_km_s: self.velocity_km_s + other.velocity_km_s, - acceleration_km_s2: if self.acceleration_km_s2.is_some() - && other.acceleration_km_s2.is_some() - { - Some(self.acceleration_km_s2.unwrap() + other.acceleration_km_s2.unwrap()) - } else if self.acceleration_km_s2.is_some() { - self.acceleration_km_s2 - } else if other.acceleration_km_s2.is_some() { - other.acceleration_km_s2 - } else { - None - }, epoch: self.epoch, frame: self.frame, }) } } -impl PartialEq for Cartesian { +impl PartialEq for CartesianState { /// Two states are equal if their position are equal within one centimeter and their velocities within one centimeter per second. 
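Because the snafu ensure! guards above make state arithmetic fallible, mismatched frames or epochs now surface as PhysicsError values that propagate with the ? operator; a small sketch using the same frame constant as the tests below:

use anise::constants::frames::EARTH_J2000;
use anise::errors::PhysicsError;
use anise::math::cartesian::CartesianState;
use anise::math::Vector3;
use hifitime::Epoch;

fn add_and_distance_sketch(epoch: Epoch) -> Result<f64, PhysicsError> {
    let s1 = CartesianState::new(7_000.0, 0.0, 0.0, 0.0, 7.5, 0.0, epoch, EARTH_J2000);
    let s2 = s1.with_radius_km(Vector3::new(7_100.0, 0.0, 0.0));
    // Both the addition and the distance computation can now fail cleanly.
    let _summed = (s1 + s2)?;
    Ok(s1.distance_to(&s2)?)
}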
fn eq(&self, other: &Self) -> bool { let radial_tol = 1e-5; // centimeter @@ -223,3 +216,137 @@ impl PartialEq for Cartesian { self.eq_within(other, radial_tol, velocity_tol) } } + +#[allow(clippy::format_in_format_args)] +impl fmt::Display for CartesianState { + // Prints as Cartesian in floating point with units + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let decimals = f.precision().unwrap_or(6); + write!( + f, + "[{:x}] {}\tposition = [{}, {}, {}] km\tvelocity = [{}, {}, {}] km/s", + self.frame, + self.epoch, + format!("{:.*}", decimals, self.radius_km.x), + format!("{:.*}", decimals, self.radius_km.y), + format!("{:.*}", decimals, self.radius_km.z), + format!("{:.*}", decimals, self.velocity_km_s.x), + format!("{:.*}", decimals, self.velocity_km_s.y), + format!("{:.*}", decimals, self.velocity_km_s.z) + ) + } +} + +#[allow(clippy::format_in_format_args)] +impl fmt::LowerExp for CartesianState { + // Prints as Cartesian in scientific notation with units + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let decimals = f.precision().unwrap_or(6); + write!( + f, + "[{:x}] {}\tposition = [{}, {}, {}] km\tvelocity = [{}, {}, {}] km/s", + self.frame, + self.epoch, + format!("{:.*e}", decimals, self.radius_km.x), + format!("{:.*e}", decimals, self.radius_km.y), + format!("{:.*e}", decimals, self.radius_km.z), + format!("{:.*e}", decimals, self.velocity_km_s.x), + format!("{:.*e}", decimals, self.velocity_km_s.y), + format!("{:.*e}", decimals, self.velocity_km_s.z) + ) + } +} + +#[cfg(test)] +mod cartesian_state_ut { + use std::f64::EPSILON; + + use hifitime::{Epoch, TimeUnits}; + + use crate::constants::frames::{EARTH_J2000, VENUS_J2000}; + use crate::errors::PhysicsError; + use crate::math::Vector6; + + use super::CartesianState; + + #[test] + fn add_wrong_epoch() { + let e = Epoch::now().unwrap(); + let e2 = e + 1.seconds(); + let frame = EARTH_J2000; + let s1 = CartesianState::new(10.0, 20.0, 30.0, 1.0, 2.0, 2.0, e, frame); + let s2 = CartesianState::new(10.0, 20.0, 30.0, 1.0, 2.0, 2.0, e2, frame); + + assert_eq!( + s1 + s2, + Err(PhysicsError::EpochMismatch { + action: "translating states", + epoch1: e, + epoch2: e2, + }) + ) + } + + #[test] + fn add_wrong_frame() { + let e = Epoch::now().unwrap(); + let frame = EARTH_J2000; + let frame2 = VENUS_J2000; + let s1 = CartesianState::new(10.0, 20.0, 30.0, 1.0, 2.0, 2.0, e, frame); + let s2 = CartesianState::new(10.0, 20.0, 30.0, 1.0, 2.0, 2.0, e, frame2); + + assert_eq!( + s1 + s2, + Err(PhysicsError::FrameMismatch { + action: "translating states", + frame1: frame.into(), + frame2: frame2.into(), + }) + ) + } + + #[test] + fn add_nominal() { + let e = Epoch::now().unwrap(); + let frame = EARTH_J2000; + let s1 = CartesianState::new(10.0, 20.0, 30.0, 1.0, 2.0, 2.0, e, frame); + let s2 = CartesianState::new(10.0, 20.0, 30.0, 1.0, 2.0, 2.0, e, frame); + let s3 = CartesianState::new(20.0, 40.0, 60.0, 2.0, 4.0, 4.0, e, frame); + + assert_eq!(s1 + s2, Ok(s3)); + + assert_eq!(format!("{s1}"), format!("[Earth J2000] {e}\tposition = [10.000000, 20.000000, 30.000000] km\tvelocity = [1.000000, 2.000000, 2.000000] km/s")); + assert_eq!(format!("{s1:e}"), format!("[Earth J2000] {e}\tposition = [1.000000e1, 2.000000e1, 3.000000e1] km\tvelocity = [1.000000e0, 2.000000e0, 2.000000e0] km/s")); + } + + #[test] + fn distance() { + let e = Epoch::now().unwrap(); + let frame = EARTH_J2000; + let s1 = CartesianState::new(10.0, 20.0, 30.0, 1.0, 2.0, 2.0, e, frame); + let s2 = CartesianState::new(10.0, 20.0, 30.0, 1.0, 2.0, 2.0, e, frame); + + 
assert!(s1.distance_to(&s2).unwrap().abs() < EPSILON); + + let as_vec6 = Vector6::new(10.0, 20.0, 30.0, 1.0, 2.0, 2.0); + assert_eq!(s1.to_cartesian_pos_vel(), as_vec6); + + assert_eq!( + CartesianState::from_cartesian_pos_vel(as_vec6, e, frame), + s1 + ); + } + + #[test] + fn zeros() { + let e = Epoch::now().unwrap(); + let frame = EARTH_J2000; + let s = CartesianState::zero(frame); + + // We cannot call the orbital momentum magnitude if the radius is zero. + assert!(s.hmag().is_err()); + + let s = CartesianState::zero_at_epoch(e, frame); + assert!(s.hmag().is_err()); + } +} diff --git a/src/math/interpolation/chebyshev.rs b/src/math/interpolation/chebyshev.rs index 5913d751..0b0c06a2 100644 --- a/src/math/interpolation/chebyshev.rs +++ b/src/math/interpolation/chebyshev.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -8,10 +8,12 @@ * Documentation: https://nyxspace.com/ */ -use crate::{errors::MathErrorKind, prelude::AniseError}; +use crate::errors::MathError; use core::f64::EPSILON; use hifitime::Epoch; +use super::InterpolationError; + /// Attempts to evaluate a Chebyshev polynomial given the coefficients, returning the value and its derivative /// /// # Notes @@ -22,9 +24,13 @@ pub(crate) fn chebyshev_eval( spline_radius_s: f64, eval_epoch: Epoch, degree: usize, -) -> Result<(f64, f64), AniseError> { +) -> Result<(f64, f64), InterpolationError> { if spline_radius_s.abs() < EPSILON { - return Err(AniseError::MathError(MathErrorKind::DivisionByZero)); + return Err(InterpolationError::InterpMath { + source: MathError::DivisionByZero { + action: "spline radius in Chebyshev eval is zero", + }, + }); } // Workspace arrays let mut w = [0.0_f64; 3]; @@ -35,7 +41,7 @@ pub(crate) fn chebyshev_eval( w[1] = w[0]; w[0] = (spline_coeffs .get(j - 1) - .ok_or(AniseError::MissingInterpolationData(eval_epoch))?) + .ok_or(InterpolationError::MissingInterpolationData { epoch: eval_epoch })?) + (2.0 * normalized_time * w[1] - w[2]); dw[2] = dw[1]; @@ -45,7 +51,7 @@ pub(crate) fn chebyshev_eval( let val = (spline_coeffs .first() - .ok_or(AniseError::MissingInterpolationData(eval_epoch))?) + .ok_or(InterpolationError::MissingInterpolationData { epoch: eval_epoch })?) + (normalized_time * w[0] - w[1]); let deriv = (w[0] + normalized_time * dw[0] - dw[1]) / spline_radius_s; diff --git a/src/math/interpolation/hermite.rs b/src/math/interpolation/hermite.rs index 4cc8ffbd..a43b23d7 100644 --- a/src/math/interpolation/hermite.rs +++ b/src/math/interpolation/hermite.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
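For intuition about what chebyshev_eval computes, here is a self-contained Clenshaw-style recurrence returning the Chebyshev series value and its derivative with respect to the normalized time; it mirrors the workspace recurrence above but is an independent sketch, not this crate's internal routine (dividing the derivative by the spline radius, as done above, converts it to a per-second rate):

/// Evaluates sum_j c[j] * T_j(t) and its derivative d/dt, for t in [-1, 1].
fn clenshaw_chebyshev(coeffs: &[f64], t: f64) -> (f64, f64) {
    if coeffs.is_empty() {
        return (0.0, 0.0);
    }
    // b1/b2 hold the Clenshaw recurrence terms, db1/db2 their derivatives.
    let (mut b1, mut b2) = (0.0_f64, 0.0_f64);
    let (mut db1, mut db2) = (0.0_f64, 0.0_f64);
    for &c in coeffs[1..].iter().rev() {
        let b = c + 2.0 * t * b1 - b2;
        let db = 2.0 * b1 + 2.0 * t * db1 - db2;
        b2 = b1;
        b1 = b;
        db2 = db1;
        db1 = db;
    }
    let value = coeffs[0] + t * b1 - b2;
    let deriv = b1 + t * db1 - db2;
    (value, deriv)
}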
@@ -61,10 +61,10 @@ use core::f64::EPSILON; -use crate::errors::MathErrorKind; +use crate::errors::MathError; use log::error; -use super::MAX_SAMPLES; +use super::{InterpolationError, MAX_SAMPLES}; /// From the abscissas (xs), the ordinates (ys), and the first derivatives (ydots), build the Hermite interpolation of the function and evaluate it at the requested abscissa (x). /// @@ -77,16 +77,20 @@ pub fn hermite_eval( ys: &[f64], ydots: &[f64], x_eval: f64, -) -> Result<(f64, f64), MathErrorKind> { +) -> Result<(f64, f64), InterpolationError> { if xs.len() != ys.len() || xs.len() != ydots.len() { - error!("Abscissas (xs), ordinates (ys), and first derivatives (ydots) must contain the same number of items, but they are of lengths {}, {}, and {}", xs.len(), ys.len(), ydots.len()); - return Err(MathErrorKind::InvalidInterpolationData); + return Err(InterpolationError::CorruptedData { + what: "lengths of abscissas (xs), ordinates (ys), and first derivatives (ydots) differ", + }); } else if xs.is_empty() { - error!("No interpolation data provided"); - return Err(MathErrorKind::InvalidInterpolationData); + return Err(InterpolationError::CorruptedData { + what: "list of abscissas (xs) is empty", + }); } else if xs.len() > MAX_SAMPLES { error!("More than {MAX_SAMPLES} samples provided, which is the maximum number of items allowed for a Hermite interpolation"); - return Err(MathErrorKind::InvalidInterpolationData); + return Err(InterpolationError::CorruptedData { + what: "list of abscissas (xs) contains more items than MAX_SAMPLES (32)", + }); } // At this point, we know that the lengths of items is correct, so we can directly address them without worry for overflowing the array. @@ -118,7 +122,12 @@ pub fn hermite_eval( let c2 = x_eval - xs[i - 1]; let denom = xs[i] - xs[i - 1]; if denom.abs() < EPSILON { - return Err(MathErrorKind::DivisionByZero); + return Err(InterpolationError::InterpMath { + source: MathError::DivisionByZero { + action: + "hermite data contains likely duplicate abcissa, remove duplicate states", + }, + }); } /* The second column of WORK contains interpolated derivative */ @@ -173,7 +182,11 @@ pub fn hermite_eval( let c2 = x_eval - xs[xi - 1]; let denom = xs[xij - 1] - xs[xi - 1]; if denom.abs() < EPSILON { - return Err(MathErrorKind::DivisionByZero); + return Err(InterpolationError::InterpMath { + source:MathError::DivisionByZero { + action: + "hermite data contains likely duplicate abcissa, remove duplicate states", + }}); } /* Compute the interpolated derivative at X for the Ith */ diff --git a/src/math/interpolation/mod.rs b/src/math/interpolation/mod.rs index 8e029546..1833a4d7 100644 --- a/src/math/interpolation/mod.rs +++ b/src/math/interpolation/mod.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -13,7 +13,36 @@ mod hermite; pub(crate) use chebyshev::chebyshev_eval; pub(crate) use hermite::hermite_eval; +use hifitime::Epoch; +use snafu::Snafu; + +use crate::errors::{DecodingError, MathError}; /// Defines the maximum degree for an interpolation. /// Until https://github.com/rust-lang/rust/issues/60551 , we cannot do operations on const generic, so we need some hack around it. 
pub(crate) const MAX_SAMPLES: usize = 32; + +#[derive(Debug, Snafu, PartialEq)] +#[snafu(visibility(pub(crate)))] +pub enum InterpolationError { + #[snafu(display("decoding error during interpolation: {source}"))] + InterpDecoding { + #[snafu(backtrace)] + source: DecodingError, + }, + #[snafu(display("math error during interpolation: {source}"))] + InterpMath { + #[snafu(backtrace)] + source: MathError, + }, + #[snafu(display("spline valid from {start} to {end} but requested {req}"))] + NoInterpolationData { + req: Epoch, + start: Epoch, + end: Epoch, + }, + #[snafu(display("no interpolation data to {epoch}, but prior checks suceeded (check integrity of the data?)"))] + MissingInterpolationData { epoch: Epoch }, + #[snafu(display("interpolation data corrupted: {what}"))] + CorruptedData { what: &'static str }, +} diff --git a/src/math/mod.rs b/src/math/mod.rs index 1d0c0b0b..efcd5ac7 100644 --- a/src/math/mod.rs +++ b/src/math/mod.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -18,10 +18,8 @@ pub type Matrix6 = nalgebra::Matrix6; pub mod angles; pub mod cartesian; pub mod interpolation; -pub mod polyfit; pub mod rotation; pub mod units; -pub mod utils; /// Returns the projection of a onto b pub fn projv(a: &Vector3, b: &Vector3) -> Vector3 { diff --git a/src/math/polyfit/cdemo/Makefile b/src/math/polyfit/cdemo/Makefile deleted file mode 100644 index f4a5630c..00000000 --- a/src/math/polyfit/cdemo/Makefile +++ /dev/null @@ -1,4 +0,0 @@ -it: - gcc -Wall -shared -o libhrmint.so hrmint.c -fPIC -g - gcc -Wall main.c -o main -lm -L. -lhrmint - LD_LIBRARY_PATH=. ./main \ No newline at end of file diff --git a/src/math/polyfit/cdemo/hrmint.c b/src/math/polyfit/cdemo/hrmint.c deleted file mode 100644 index 42aae034..00000000 --- a/src/math/polyfit/cdemo/hrmint.c +++ /dev/null @@ -1,447 +0,0 @@ -/* hrmint.f -- translated by f2c (version 19980913). - You must link the resulting object file with the libraries: - -lf2c -lm (in that order) -*/ - -// #include "f2c.h" -#include -typedef int integer; -typedef double doublereal; - -/* $Procedure HRMINT ( Hermite polynomial interpolation ) */ -/* Subroutine */ int hrmint_(integer* n, doublereal* xvals, doublereal* yvals, - doublereal* x, doublereal* work, doublereal* f, - doublereal* df) -{ - /* System generated locals */ - // integer xvals_dim1, yvals_dim1, work_dim1, work_offset, i__1, i__2, i__3, - // i__4, i__5, i__6, i__7; - integer work_dim1, work_offset; - - /* Builtin functions */ - integer s_rnge(char*, integer, char*, integer); - - /* Local variables */ - doublereal temp; - integer this__, prev, next, i__, j; - // extern /* Subroutine */ int chkin_(char*, ftnlen); - doublereal denom; - // extern /* Subroutine */ int errdp_(char*, doublereal*, ftnlen); - doublereal c1, c2; - integer xi; - // extern /* Subroutine */ int sigerr_(char*, ftnlen), chkout_(char*, ftnlen), - // setmsg_(char*, ftnlen), errint_(char*, integer*, ftnlen); - // extern logical return_(void); - integer xij; - - /* $ Abstract */ - - /* Evaluate a Hermite interpolating polynomial at a specified */ - /* abscissa value. 
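The hermite_eval function above builds a full divided-difference table (the same scheme as the SPICE hrmint routine deleted below); for intuition, the two-point cubic case is enough to show why both the ordinates and their first derivatives are needed. This is a standalone sketch returning the interpolated value and derivative, not the N-point routine used by the crate:

/// Cubic Hermite interpolation between (x0, y0, yd0) and (x1, y1, yd1), evaluated at x.
fn hermite_two_point(x0: f64, y0: f64, yd0: f64, x1: f64, y1: f64, yd1: f64, x: f64) -> (f64, f64) {
    let h = x1 - x0;
    let t = (x - x0) / h;
    let (t2, t3) = (t * t, t * t * t);
    // Hermite basis functions weighted by the end-point values and slopes.
    let f = (2.0 * t3 - 3.0 * t2 + 1.0) * y0
        + (t3 - 2.0 * t2 + t) * h * yd0
        + (-2.0 * t3 + 3.0 * t2) * y1
        + (t3 - t2) * h * yd1;
    // Derivative of the bases with respect to t, then chain rule by 1/h.
    let df_dt = (6.0 * t2 - 6.0 * t) * y0
        + (3.0 * t2 - 4.0 * t + 1.0) * h * yd0
        + (-6.0 * t2 + 6.0 * t) * y1
        + (3.0 * t2 - 2.0 * t) * h * yd1;
    (f, df_dt / h)
}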
*/ - - /* $ Disclaimer */ - - /* THIS SOFTWARE AND ANY RELATED MATERIALS WERE CREATED BY THE */ - /* CALIFORNIA INSTITUTE OF TECHNOLOGY (CALTECH) UNDER A U.S. */ - /* GOVERNMENT CONTRACT WITH THE NATIONAL AERONAUTICS AND SPACE */ - /* ADMINISTRATION (NASA). THE SOFTWARE IS TECHNOLOGY AND SOFTWARE */ - /* PUBLICLY AVAILABLE UNDER U.S. EXPORT LAWS AND IS PROVIDED "AS-IS" */ - /* TO THE RECIPIENT WITHOUT WARRANTY OF ANY KIND, INCLUDING ANY */ - /* WARRANTIES OF PERFORMANCE OR MERCHANTABILITY OR FITNESS FOR A */ - /* PARTICULAR USE OR PURPOSE (AS SET FORTH IN UNITED STATES UCC */ - /* SECTIONS 2312-2313) OR FOR ANY PURPOSE WHATSOEVER, FOR THE */ - /* SOFTWARE AND RELATED MATERIALS, HOWEVER USED. */ - - /* IN NO EVENT SHALL CALTECH, ITS JET PROPULSION LABORATORY, OR NASA */ - /* BE LIABLE FOR ANY DAMAGES AND/OR COSTS, INCLUDING, BUT NOT */ - /* LIMITED TO, INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, */ - /* INCLUDING ECONOMIC DAMAGE OR INJURY TO PROPERTY AND LOST PROFITS, */ - /* REGARDLESS OF WHETHER CALTECH, JPL, OR NASA BE ADVISED, HAVE */ - /* REASON TO KNOW, OR, IN FACT, SHALL KNOW OF THE POSSIBILITY. */ - - /* RECIPIENT BEARS ALL RISK RELATING TO QUALITY AND PERFORMANCE OF */ - /* THE SOFTWARE AND ANY RELATED MATERIALS, AND AGREES TO INDEMNIFY */ - /* CALTECH AND NASA FOR ALL THIRD-PARTY CLAIMS RESULTING FROM THE */ - /* ACTIONS OF RECIPIENT IN THE USE OF THE SOFTWARE. */ - - /* $ Required_Reading */ - - /* None. */ - - /* $ Keywords */ - - /* INTERPOLATION */ - /* POLYNOMIAL */ - - /* $ Declarations */ - /* $ Brief_I/O */ - - /* Variable I/O Description */ - /* -------- --- -------------------------------------------------- */ - /* N I Number of points defining the polynomial. */ - /* XVALS I Abscissa values. */ - /* YVALS I Ordinate and derivative values. */ - /* X I Point at which to interpolate the polynomial. */ - /* WORK I-O Work space array. */ - /* F O Interpolated function value at X. */ - /* DF O Interpolated function's derivative at X. */ - - /* $ Detailed_Input */ - - /* N is the number of points defining the polynomial. */ - /* The arrays XVALS and YVALS contain N and 2*N */ - /* elements respectively. */ - - /* XVALS is an array of length N containing abscissa values. */ - - /* YVALS is an array of length 2*N containing ordinate and */ - /* derivative values for each point in the domain */ - /* defined by FIRST, STEP, and N. The elements */ - - /* YVALS( 2*I - 1 ) */ - /* YVALS( 2*I ) */ - - /* give the value and first derivative of the output */ - /* polynomial at the abscissa value */ - - /* XVALS(I) */ - - /* where I ranges from 1 to N. */ - - /* WORK is a work space array. It is used by this routine */ - /* as a scratch area to hold intermediate results. */ - - /* X is the abscissa value at which the interpolating */ - /* polynomial and its derivative are to be evaluated. */ - - /* $ Detailed_Output */ - - /* F, */ - /* DF are the value and derivative at X of the unique */ - /* polynomial of degree 2N-1 that fits the points and */ - /* derivatives defined by XVALS and YVALS. */ - - /* $ Parameters */ - - /* None. */ - - /* $ Exceptions */ - - /* 1) If two input abscissas are equal, the error */ - /* SPICE(DIVIDEBYZERO) will be signaled. */ - - /* 2) If N is less than 1, the error SPICE(INVALIDSIZE) is */ - /* signaled. */ - - /* 3) This routine does not attempt to ward off or diagnose */ - /* arithmetic overflows. */ - - /* $ Files */ - - /* None. 
*/ - - /* $ Particulars */ - - /* Users of this routine must choose the number of points to use */ - /* in their interpolation method. The authors of Reference [1] have */ - /* this to say on the topic: */ - - /* Unless there is solid evidence that the interpolating function */ - /* is close in form to the true function f, it is a good idea to */ - /* be cautious about high-order interpolation. We */ - /* enthusiastically endorse interpolations with 3 or 4 points, we */ - /* are perhaps tolerant of 5 or 6; but we rarely go higher than */ - /* that unless there is quite rigorous monitoring of estimated */ - /* errors. */ - - /* The same authors offer this warning on the use of the */ - /* interpolating function for extrapolation: */ - - /* ...the dangers of extrapolation cannot be overemphasized: */ - /* An interpolating function, which is perforce an extrapolating */ - /* function, will typically go berserk when the argument x is */ - /* outside the range of tabulated values by more than the typical */ - /* spacing of tabulated points. */ - - /* $ Examples */ - - /* 1) Fit a 7th degree polynomial through the points ( x, y, y' ) */ - - /* ( -1, 6, 3 ) */ - /* ( 0, 5, 0 ) */ - /* ( 3, 2210, 5115 ) */ - /* ( 5, 78180, 109395 ) */ - - /* and evaluate this polynomial at x = 2. */ - - /* PROGRAM TEST_HRMINT */ - - /* DOUBLE PRECISION ANSWER */ - /* DOUBLE PRECISION DERIV */ - /* DOUBLE PRECISION XVALS (4) */ - /* DOUBLE PRECISION YVALS (8) */ - /* DOUBLE PRECISION WORK (8,2) */ - /* INTEGER N */ - - /* N = 4 */ - - /* XVALS(1) = -1.D0 */ - /* XVALS(2) = 0.D0 */ - /* XVALS(3) = 3.D0 */ - /* XVALS(4) = 5.D0 */ - - /* YVALS(1) = 6.D0 */ - /* YVALS(2) = 3.D0 */ - /* YVALS(3) = 5.D0 */ - /* YVALS(4) = 0.D0 */ - /* YVALS(5) = 2210.D0 */ - /* YVALS(6) = 5115.D0 */ - /* YVALS(7) = 78180.D0 */ - /* YVALS(8) = 109395.D0 */ - - /* CALL HRMINT ( N, XVALS, YVALS, 2.D0, WORK, ANSWER, DERIV ) */ - - /* WRITE (*,*) 'ANSWER = ', ANSWER */ - /* WRITE (*,*) 'DERIV = ', DERIV */ - /* END */ - - /* The returned value of ANSWER should be 141.D0, and the returned */ - /* derivative value should be 456.D0, since the unique 7th degree */ - /* polynomial that fits these constraints is */ - - /* 7 2 */ - /* f(x) = x + 2x + 5 */ - - /* $ Restrictions */ - - /* None. */ - - /* $ Literature_References */ - - /* [1] "Numerical Recipes---The Art of Scientific Computing" by */ - /* William H. Press, Brian P. Flannery, Saul A. Teukolsky, */ - /* William T. Vetterling (see sections 3.0 and 3.1). */ - - /* [2] "Elementary Numerical Analysis---An Algorithmic Approach" */ - /* by S. D. Conte and Carl de Boor. See p. 64. */ - - /* $ Author_and_Institution */ - - /* N.J. Bachman (JPL) */ - - /* $ Version */ - - /* - SPICELIB Version 1.2.1, 28-JAN-2014 (NJB) */ - - /* Fixed a few comment typos. */ - - /* - SPICELIB Version 1.2.0, 01-FEB-2002 (NJB) (EDW) */ - - /* Bug fix: declarations of local variables XI and XIJ */ - /* were changed from DOUBLE PRECISION to INTEGER. */ - /* Note: bug had no effect on behavior of this routine. */ - - /* - SPICELIB Version 1.1.0, 28-DEC-2001 (NJB) */ - - /* Blanks following final newline were truncated to */ - /* suppress compilation warnings on the SGI-N32 platform. */ - - /* - SPICELIB Version 1.0.0, 01-MAR-2000 (NJB) */ - - /* -& */ - /* $ Index_Entries */ - - /* interpolate function using Hermite polynomial */ - /* Hermite interpolation */ - - /* -& */ - - /* SPICELIB functions */ - - /* Local variables */ - - /* Check in only if an error is detected. 
*/ - - /* Parameter adjustments */ - work_dim1 = *n * 2; - work_offset = work_dim1 + 1; - // yvals_dim1 = *n * 2; - // xvals_dim1 = *n; - - /* Function Body */ - - /* No data, no interpolation. */ - - if (*n < 1) { - printf("Array size must be positive; was #%d", *n); - return 0; - } - - /* Copy the input array into WORK. After this, the first column */ - /* of WORK represents the first column of our triangular */ - /* interpolation table. */ - - // i__1 = *n * 2; - for (i__ = 1; i__ <= *n * 2; ++i__) { - // work[(i__2 = i__ + work_dim1 - work_offset)] = yvals[(i__3 = i__ - 1)]; - work[(i__ + work_dim1 - work_offset)] = yvals[(i__ - 1)]; - } - - printf("[0] { "); - size_t fj; - for (size_t j = 0; j < 256; j++) { - double val = work[j]; - if (val == 0) { - break; - } - printf("%f ", val); - fj = j; - } - printf("} (items = %ld)\n", fj); - - /* Compute the second column of the interpolation table: this */ - /* consists of the N-1 values obtained by evaluating the */ - /* first-degree interpolants at X. We'll also evaluate the */ - /* derivatives of these interpolants at X and save the results in */ - /* the second column of WORK. Because the derivative computations */ - /* depend on the function computations from the previous column in */ - /* the interpolation table, and because the function interpolation */ - /* overwrites the previous column of interpolated function values, */ - /* we must evaluate the derivatives first. */ - - // i__1 = *n - 1; - double ts[256] = { 0 }; - for (i__ = 1; i__ <= *n - 1; ++i__) { - c1 = xvals[i__] - *x; - c2 = *x - xvals[i__ - 1]; - denom = xvals[i__] - xvals[i__ - 1]; - - /* The second column of WORK contains interpolated derivative */ - /* values. */ - - /* The odd-indexed interpolated derivatives are simply the input */ - /* derivatives. */ - - prev = (i__ * 2) - 1; - this__ = prev + 1; - next = this__ + 1; - work[prev + (work_dim1 * 2) - work_offset] = work[this__ + work_dim1 - work_offset]; - printf("set work[%d] = work[%d]\n", prev + (work_dim1 * 2) - work_offset, (this__ + work_dim1 - work_offset)); - - /* The even-indexed interpolated derivatives are the slopes of */ - /* the linear interpolating polynomials for adjacent input */ - /* abscissa/ordinate pairs. */ - - ts[i__ - 1] = (work[(next + work_dim1 - work_offset)] - work[(prev + work_dim1 - work_offset)]) / denom; - work[(this__ + (work_dim1 * 2) - work_offset)] = (work[(next + work_dim1 - work_offset)] - work[(prev + work_dim1 - work_offset)]) / denom; - // Calculate the difference between that time derivative and the input - double err = work[(this__ + (work_dim1 * 2) - work_offset)] - yvals[this__ - 1]; - printf("set work[%d] = (work[%d] - work[%d])/%f => %f\n", this__ + (work_dim1 * 2) - work_offset, prev + work_dim1 - work_offset, denom, err); - - /* The first column of WORK contains interpolated function values. */ - /* The odd-indexed entries are the linear Taylor polynomials, */ - /* for each input abscissa value, evaluated at X. 
*/ - - temp = work[(this__ + work_dim1 - work_offset)] * (*x - xvals[(i__ - 1)]) + work[(prev + work_dim1 - work_offset)]; - work[(this__ + work_dim1 - work_offset)] = (c1 * work[(prev + work_dim1 - work_offset)] + c2 * work[(next + work_dim1 - work_offset)]) / denom; - work[(prev + work_dim1 - work_offset)] = temp; - } - - printf("[1] TS { "); - for (size_t j = 0; j < 256; j++) { - double val = ts[j]; - if (val == 0) { - break; - } - printf("%f ", val); - fj = j; - } - printf("} (items = %ld)\n", fj); - - printf("[1] { "); - for (size_t j = 0; j < 256; j++) { - double val = work[j]; - if (val == 0) { - break; - } - printf("%f ", val); - fj = j; - } - printf("} (items = %ld)\n", fj); - - /* The last column entries were not computed by the preceding loop; */ - /* compute them now. */ - - work[((*n * 2) - 1 + (work_dim1 * 2) - work_offset)] = work[((*n * 2) + work_dim1 - work_offset)]; - work[((*n * 2) - 1 + work_dim1 - work_offset)] = work[((*n * 2) + work_dim1 - work_offset)] * (*x - xvals[(*n - 1)]) + work[((*n * 2) - 1 + work_dim1 - work_offset)]; - printf("[2] { "); - for (size_t j = 0; j < 256; j++) { - double val = work[j]; - if (val == 0) { - break; - } - printf("%f ", val); - fj = j; - } - printf("} (items = %ld)\n", fj); - - /* Compute columns 3 through 2*N of the table. */ - - for (j = 2; j <= (*n * 2) - 1; ++j) { - for (i__ = 1; i__ <= (*n * 2) - j; ++i__) { - - /* In the theoretical construction of the interpolation table, - */ - /* there are 2*N abscissa values, since each input abcissa */ - /* value occurs with multiplicity two. In this theoretical */ - /* construction, the Jth column of the interpolation table */ - /* contains results of evaluating interpolants that span J+1 */ - /* consecutive abscissa values. The indices XI and XIJ below */ - /* are used to pick the correct abscissa values out of the */ - /* physical XVALS array, in which the abscissa values are not */ - /* repeated. */ - - xi = (i__ + 1) / 2; - xij = (i__ + j + 1) / 2; - c1 = xvals[(xij - 1)] - *x; - c2 = *x - xvals[(xi - 1)]; - denom = xvals[(xij - 1)] - xvals[(xi - 1)]; - - /* Compute the interpolated derivative at X for the Ith */ - /* interpolant. This is the derivative with respect to X of */ - /* the expression for the interpolated function value, which */ - /* is the second expression below. This derivative computation - */ - /* is done first because it relies on the interpolated */ - /* function values from the previous column of the */ - /* interpolation table. */ - - /* The derivative expression here corresponds to equation */ - /* 2.35 on page 64 in reference [2]. */ - - work[(i__ + (work_dim1 * 2) - work_offset)] = (c1 * work[(i__ + (work_dim1 * 2) - work_offset)] + c2 * work[(i__ + 1 + (work_dim1 * 2) - work_offset)] + (work[(i__ + 1 + work_dim1 - work_offset)] - work[(i__ + work_dim1 - work_offset)])) / denom; - - /* Compute the interpolated function value at X for the Ith */ - /* interpolant. */ - - work[(i__ + work_dim1 - work_offset)] = (c1 * work[(i__ + work_dim1 - work_offset)] + c2 * work[(i__ + 1 + work_dim1 - work_offset)]) / denom; - } - } - printf("[3] { "); - for (size_t j = 0; j < 256; j++) { - double val = work[j]; - if (val == 0) { - break; - } - printf("%f ", val); - fj = j; - } - printf("} (items = %ld)\n", fj); - - /* Our interpolated function value is sitting in WORK(1,1) at this */ - /* point. The interpolated derivative is located in WORK(1,2). 
*/ - - *f = work[(work_dim1 + 1 - work_offset)]; - *df = work[((work_dim1 * 2) + 1 - work_offset)]; - return 0; -} /* hrmint_ */ diff --git a/src/math/polyfit/cdemo/main.c b/src/math/polyfit/cdemo/main.c deleted file mode 100644 index 2576761a..00000000 --- a/src/math/polyfit/cdemo/main.c +++ /dev/null @@ -1,30 +0,0 @@ -#include -#include - -typedef int integer; -typedef double doublereal; - -int hrmint_ (integer *n, doublereal *xvals, doublereal *yvals, doublereal *x, - doublereal *work, doublereal *f, doublereal *df); - -int -main () -{ - - double xvals[] = { -1.0, 0.0, 3.0, 5.0 }; - double yvals[] = { 6.0, 3.0, 5.0, 0.0, 2210.0, 5115.0, 78180.0, 109395.0 }; - double x = 2.0; - double f, df; - int n = 7; - - double work[256] = { 0 }; - double want_x = 8.9871033515359500e+02; - double want_vx = -1.2836208430532707e+00; - - int rslt = hrmint_ (&n, xvals, yvals, &x, work, &f, &df); - printf ("rslt = %d\n", rslt); - printf ("f = %f\tdf= %f\n", f, df); - printf ("Δf = %e\tΔdf= %e\n", fabs (f - want_x), fabs (df - want_vx)); - - return 0; -} \ No newline at end of file diff --git a/src/math/polyfit/mod.rs b/src/math/polyfit/mod.rs deleted file mode 100644 index f1f13861..00000000 --- a/src/math/polyfit/mod.rs +++ /dev/null @@ -1,42 +0,0 @@ -/* - * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) - * This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at https://mozilla.org/MPL/2.0/. - * - * Documentation: https://nyxspace.com/ - */ - -use core::ops::{Index, IndexMut}; - -/// A FixedArray is a a way around allocating vectors when we don't know the exact size at compile time. -/// This will be made obsolete when https://github.com/rust-lang/rust/issues/60551 is merged into rust stable. -#[derive(Copy, Clone, Debug)] -pub struct F64TupleArray([[f64; N]; S]); - -impl F64TupleArray { - pub fn zeros() -> Self { - Self([[0.0; N]; S]) - } - - const fn indexes(&self, index: usize) -> (usize, usize) { - (index / N, index % N) - } -} - -impl Index for F64TupleArray { - type Output = f64; - - fn index(&self, index: usize) -> &Self::Output { - let (one, two) = self.indexes(index); - &self.0[one][two] - } -} - -impl IndexMut for F64TupleArray { - fn index_mut(&mut self, index: usize) -> &mut f64 { - let (one, two) = self.indexes(index); - &mut self.0[one][two] - } -} diff --git a/src/math/rotation/dcm.rs b/src/math/rotation/dcm.rs index cd316809..0486fd1f 100644 --- a/src/math/rotation/dcm.rs +++ b/src/math/rotation/dcm.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -8,8 +8,8 @@ * Documentation: https://nyxspace.com/ */ use crate::{ + errors::PhysicsError, math::{Matrix3, Matrix6, Vector3, Vector6}, - prelude::AniseError, NaifId, }; use nalgebra::Vector4; @@ -88,7 +88,7 @@ impl DCM { } /// Returns the 6x6 DCM to rotate a state, if the time derivative of this DCM exists. 
- pub fn state_dcm(&self) -> Result { + pub fn state_dcm(&self) -> Result { match self.rot_mat_dt { Some(mat_dt) => { let mut full_dcm = Matrix6::zeros(); @@ -104,7 +104,9 @@ impl DCM { Ok(full_dcm) } - None => Err(AniseError::ItemNotFound), + None => Err(PhysicsError::DCMMissingDerivative { + action: "building the 6x6 DCM matrix", + }), } } @@ -154,7 +156,7 @@ impl Mul for DCM { } impl Mul for DCM { - type Output = Result; + type Output = Result; /// Applying the matrix to a vector yields the vector's representation in the new coordinate system. fn mul(self, rhs: Vector6) -> Self::Output { diff --git a/src/math/rotation/mod.rs b/src/math/rotation/mod.rs index d7383c94..b666e30f 100644 --- a/src/math/rotation/mod.rs +++ b/src/math/rotation/mod.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. diff --git a/src/math/rotation/mrp.rs b/src/math/rotation/mrp.rs index 740e3b84..11f3478b 100644 --- a/src/math/rotation/mrp.rs +++ b/src/math/rotation/mrp.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -8,9 +8,11 @@ * Documentation: https://nyxspace.com/ */ +use snafu::ensure; + use crate::{ + errors::{DivisionByZeroSnafu, MathError, OriginMismatchSnafu, PhysicsError}, math::{Matrix3, Vector3}, - prelude::AniseError, NaifId, }; use core::f64::EPSILON; @@ -78,21 +80,22 @@ impl MRP { /// # Returns /// /// The shadow MRP as a new instance of `MRP`. - pub fn shadow(&self) -> Result { - if self.is_singular() { - Err(AniseError::MathError( - crate::errors::MathErrorKind::DivisionByZero, - )) - } else { - let s_squared = self.s0 * self.s0 + self.s1 * self.s1 + self.s2 * self.s2; - Ok(MRP { - s0: -self.s0 / s_squared, - s1: -self.s1 / s_squared, - s2: -self.s2 / s_squared, - from: self.from, - to: self.to, - }) - } + pub fn shadow(&self) -> Result { + ensure!( + !self.is_singular(), + DivisionByZeroSnafu { + action: "cannot compute shadow MRP of a singular MRP" + } + ); + + let s_squared = self.s0 * self.s0 + self.s1 * self.s1 + self.s2 * self.s2; + Ok(MRP { + s0: -self.s0 / s_squared, + s1: -self.s1 / s_squared, + s2: -self.s2 / s_squared, + from: self.from, + to: self.to, + }) } /// Returns whether this MRP is singular. @@ -161,26 +164,28 @@ impl MRP { } /// Returns the relative MRP between self and the rhs MRP. - pub fn relative_to(&self, rhs: &Self) -> Result { - if self.from != rhs.from { - Err(AniseError::IncompatibleRotation { - from: self.from, - to: rhs.to, - }) - } else { - // Using the same notation as in Eq. 
3.153 in Schaub and Junkins, 3rd edition - let s_prime = self; - let s_dprime = rhs; - let denom = 1.0 - + s_prime.norm_squared() * s_dprime.norm_squared() - + 2.0 * s_prime.as_vector().dot(&s_dprime.as_vector()); - let num1 = (1.0 - s_prime.norm_squared()) * s_dprime.as_vector(); - let num2 = -(1.0 - s_dprime.norm_squared()) * s_prime.as_vector(); - let num3 = 2.0 * s_dprime.as_vector().cross(&s_prime.as_vector()); - - let sigma = (num1 + num2 + num3) / denom; - Ok(Self::new(sigma[0], sigma[1], sigma[2], rhs.from, self.to)) - } + pub fn relative_to(&self, rhs: &Self) -> Result { + ensure!( + self.from == rhs.from, + OriginMismatchSnafu { + action: "computing relative MRP", + from1: self.from, + from2: rhs.from + } + ); + + // Using the same notation as in Eq. 3.153 in Schaub and Junkins, 3rd edition + let s_prime = self; + let s_dprime = rhs; + let denom = 1.0 + + s_prime.norm_squared() * s_dprime.norm_squared() + + 2.0 * s_prime.as_vector().dot(&s_dprime.as_vector()); + let num1 = (1.0 - s_prime.norm_squared()) * s_dprime.as_vector(); + let num2 = -(1.0 - s_dprime.norm_squared()) * s_prime.as_vector(); + let num3 = 2.0 * s_dprime.as_vector().cross(&s_prime.as_vector()); + + let sigma = (num1 + num2 + num3) / denom; + Ok(Self::new(sigma[0], sigma[1], sigma[2], rhs.from, self.to)) } } @@ -196,60 +201,63 @@ impl PartialEq for MRP { } impl Mul for MRP { - type Output = Result; + type Output = Result; fn mul(self, rhs: Self) -> Self::Output { - if self.to != rhs.from { - Err(AniseError::IncompatibleRotation { - from: rhs.from, - to: self.to, - }) - } else { - // Using the same notation as in Eq. 3.152 in Schaub and Junkins, 3rd edition - let s_prime = self; - let s_dprime = rhs; - let denom = 1.0 + s_prime.norm_squared() * s_dprime.norm_squared() - - 2.0 * s_prime.as_vector().dot(&s_dprime.as_vector()); - let num1 = (1.0 - s_prime.norm_squared()) * s_dprime.as_vector(); - let num2 = (1.0 - s_dprime.norm_squared()) * s_prime.as_vector(); - let num3 = -2.0 * s_dprime.as_vector().cross(&s_prime.as_vector()); - - let sigma = (num1 + num2 + num3) / denom; - Ok(Self::new(sigma[0], sigma[1], sigma[2], self.from, rhs.to)) - } + ensure!( + self.to == rhs.from, + OriginMismatchSnafu { + action: "composing MRPs", + from1: self.from, + from2: rhs.from + } + ); + + // Using the same notation as in Eq. 
3.152 in Schaub and Junkins, 3rd edition + let s_prime = self; + let s_dprime = rhs; + let denom = 1.0 + s_prime.norm_squared() * s_dprime.norm_squared() + - 2.0 * s_prime.as_vector().dot(&s_dprime.as_vector()); + let num1 = (1.0 - s_prime.norm_squared()) * s_dprime.as_vector(); + let num2 = (1.0 - s_dprime.norm_squared()) * s_prime.as_vector(); + let num3 = -2.0 * s_dprime.as_vector().cross(&s_prime.as_vector()); + + let sigma = (num1 + num2 + num3) / denom; + Ok(Self::new(sigma[0], sigma[1], sigma[2], self.from, rhs.to)) } } impl TryFrom for MRP { - type Error = AniseError; + type Error = MathError; /// Try to convert a quaternion into its MRP representation /// /// # Failure cases /// + A zero rotation, as the associated MRP is singular fn try_from(q: Quaternion) -> Result { - if (1.0 + q.w).abs() < EPSILON { - Err(AniseError::MathError( - crate::errors::MathErrorKind::DivisionByZero, - )) - } else { - let s = Self { - from: q.from, - to: q.to, - s0: q.x / (1.0 + q.w), - s1: q.y / (1.0 + q.w), - s2: q.z / (1.0 + q.w), - } - .normalize(); - // We don't ever want to deal with singular MRPs, so check once more - if s.is_singular() { - Err(AniseError::MathError( - crate::errors::MathErrorKind::DivisionByZero, - )) - } else { - Ok(s) + ensure!( + (1.0 + q.w).abs() >= EPSILON, + DivisionByZeroSnafu { + action: "quaternion represents a zero rotation, which is a singular MRP" } + ); + + let s = Self { + from: q.from, + to: q.to, + s0: q.x / (1.0 + q.w), + s1: q.y / (1.0 + q.w), + s2: q.z / (1.0 + q.w), } + .normalize(); + // We don't ever want to deal with singular MRPs, so check once more + ensure!( + !s.is_singular(), + DivisionByZeroSnafu { + action: "MRP from quaternion is singular" + } + ); + Ok(s) } } diff --git a/src/math/rotation/quaternion.rs b/src/math/rotation/quaternion.rs index 84bde062..fa9781b5 100644 --- a/src/math/rotation/quaternion.rs +++ b/src/math/rotation/quaternion.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
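The shadow set computed above parameterizes the same physical attitude as the original MRP (sigma maps to -sigma/|sigma|^2), which is how callers steer clear of the 360 degree singularity. A usage sketch with arbitrary placeholder frame IDs, assuming MRP is re-exported from the rotation module:

use anise::math::rotation::MRP;

fn shadow_sketch() {
    // MRP rotating from frame ID 1000 to frame ID 2000 (placeholder IDs).
    let sigma = MRP::new(0.1, 0.2, 0.3, 1000, 2000);
    match sigma.shadow() {
        // Same attitude, different parameterization: exactly one of the pair
        // has a norm of at most one.
        Ok(_shadow) => println!("shadow set exists"),
        Err(err) => eprintln!("cannot shadow a singular MRP: {err}"),
    }
}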
@@ -8,11 +8,13 @@ * Documentation: https://nyxspace.com/ */ +use crate::errors::{OriginMismatchSnafu, PhysicsError}; use crate::math::rotation::EPSILON; -use crate::{math::Vector3, math::Vector4, prelude::AniseError, NaifId}; +use crate::{math::Vector3, math::Vector4, NaifId}; use core::fmt; use core::ops::Mul; use nalgebra::Matrix4x3; +use snafu::ensure; pub use core::f64::consts::{FRAC_PI_2, FRAC_PI_3, FRAC_PI_4, PI, TAU}; @@ -231,43 +233,45 @@ impl EulerParameter { } impl Mul for Quaternion { - type Output = Result; - - fn mul(self, rhs: Quaternion) -> Result { - if self.to != rhs.from { - Err(AniseError::IncompatibleRotation { - from: rhs.from, - to: self.to, - }) + type Output = Result; + + fn mul(self, rhs: Quaternion) -> Result { + ensure!( + self.to == rhs.from, + OriginMismatchSnafu { + action: "multiplying quaternions", + from1: self.from, + from2: rhs.from + } + ); + + let s = self.w * rhs.w - self.x * rhs.x - self.y * rhs.y - self.z * rhs.z; + let i = self.w * rhs.x + self.x * rhs.w + self.y * rhs.z - self.z * rhs.y; + let j = self.w * rhs.y - self.x * rhs.z + self.y * rhs.w + self.z * rhs.x; + let k = self.w * rhs.z + self.x * rhs.y - self.y * rhs.x + self.z * rhs.w; + + let (from, to) = if self.to == rhs.from && self.from == rhs.to { + // Then we don't change the frames + (self.from, self.to) } else { - let s = self.w * rhs.w - self.x * rhs.x - self.y * rhs.y - self.z * rhs.z; - let i = self.w * rhs.x + self.x * rhs.w + self.y * rhs.z - self.z * rhs.y; - let j = self.w * rhs.y - self.x * rhs.z + self.y * rhs.w + self.z * rhs.x; - let k = self.w * rhs.z + self.x * rhs.y - self.y * rhs.x + self.z * rhs.w; - - let (from, to) = if self.to == rhs.from && self.from == rhs.to { - // Then we don't change the frames - (self.from, self.to) - } else { - (self.from, rhs.to) - }; - - Ok(Quaternion { - w: s, - x: i, - y: j, - z: k, - from, - to, - }) - } + (self.from, rhs.to) + }; + + Ok(Quaternion { + w: s, + x: i, + y: j, + z: k, + from, + to, + }) } } impl Mul for &Quaternion { - type Output = Result; + type Output = Result; - fn mul(self, other: &Quaternion) -> Result { + fn mul(self, other: &Quaternion) -> Result { *self * *other } } @@ -374,7 +378,7 @@ mod ut_quaternion { let (uvec, angle_rad) = q1_to_q2.uvec_angle(); if uvec.norm() > EPSILON { - if angle < -PI || angle > PI { + if !(-PI..=PI).contains(&angle) { assert_eq!(uvec, -uvec_q1, "{angle}"); } else { assert_eq!(uvec, uvec_q1, "{angle}"); @@ -394,7 +398,7 @@ mod ut_quaternion { let (uvec, _angle_rad) = q2_to_q1.uvec_angle(); if uvec.norm() > EPSILON { - if angle >= -PI && angle <= PI { + if (-PI..=PI).contains(&angle) { assert_eq!(uvec, -uvec_q1, "{angle}"); } else { assert_eq!(uvec, uvec_q1, "{angle}"); diff --git a/src/math/units.rs b/src/math/units.rs index 236e7535..90c7b452 100644 --- a/src/math/units.rs +++ b/src/math/units.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. diff --git a/src/math/utils.rs b/src/math/utils.rs deleted file mode 100644 index 08a5a185..00000000 --- a/src/math/utils.rs +++ /dev/null @@ -1,57 +0,0 @@ -/* - * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. 
AUTHORS.md) - * This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at https://mozilla.org/MPL/2.0/. - * - * Documentation: https://nyxspace.com/ - */ - -use core::f64::EPSILON; - -/// Returns the absolute difference between two floats as per the approx crate -pub fn abs_diff(a: f64, b: f64) -> f64 { - if a > b { - a - b - } else { - b - a - } -} - -pub fn rel_diff(a: f64, b: f64, max_relative: f64) -> f64 { - if a == b { - return 0.0; - } - - // Handle remaining infinities - if a.is_infinite() || b.is_infinite() { - // We are far from equal so return a big number - return f64::INFINITY; - } - - let abs_diff = (a - b).abs(); - - // For when the numbers are really close together - if abs_diff <= EPSILON { - return abs_diff; - } - - let abs_a = a.abs(); - let abs_b = b.abs(); - - let largest = if abs_b > abs_a { abs_b } else { abs_a }; - - // Use a relative difference comparison - largest * max_relative -} - -// Normalize between -1.0 and 1.0 -pub fn normalize(x: f64, min_x: f64, max_x: f64) -> f64 { - 2.0 * (x - min_x) / (max_x - min_x) - 1.0 -} - -// Denormalize between -1.0 and 1.0 -pub fn denormalize(xp: f64, min_x: f64, max_x: f64) -> f64 { - (max_x - min_x) * (xp + 1.0) / 2.0 + min_x -} diff --git a/src/naif/daf/daf.rs b/src/naif/daf/daf.rs index a5749918..9b8dc205 100644 --- a/src/naif/daf/daf.rs +++ b/src/naif/daf/daf.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -8,15 +8,22 @@ * Documentation: https://nyxspace.com/ */ +use super::file_record::FileRecordError; +use super::{ + DAFError, DecodingNameSnafu, DecodingSummarySnafu, FileRecordSnafu, NAIFDataSet, NAIFRecord, + NAIFSummaryRecord, +}; pub use super::{FileRecord, NameRecord, SummaryRecord}; -use super::{NAIFDataSet, NAIFRecord, NAIFSummaryRecord}; -use crate::file2heap; -use crate::{errors::IntegrityErrorKind, prelude::AniseError, DBL_SIZE}; +use crate::errors::DecodingError; +use crate::naif::daf::DecodingDataSnafu; +use crate::{errors::IntegrityError, DBL_SIZE}; use bytes::Bytes; use core::hash::Hash; use core::ops::Deref; use hifitime::Epoch; use log::{error, trace, warn}; +use snafu::ResultExt; +use std::fmt::Debug; use std::marker::PhantomData; use zerocopy::AsBytes; @@ -36,10 +43,8 @@ macro_rules! 
io_imports { io_imports!(); pub(crate) const RCRD_LEN: usize = 1024; -#[derive(Clone, Default, Debug)] +#[derive(Clone, Default, Debug, PartialEq)] pub struct DAF { - pub file_record: FileRecord, - pub name_record: NameRecord, pub bytes: Bytes, pub crc32_checksum: u32, pub _daf_type: PhantomData, @@ -52,109 +57,141 @@ impl DAF { } /// Scrubs the data by computing the CRC32 of the bytes and making sure that it still matches the previously known hash - pub fn scrub(&self) -> Result<(), AniseError> { + pub fn scrub(&self) -> Result<(), IntegrityError> { if self.crc32() == self.crc32_checksum { Ok(()) } else { // Compiler will optimize the double computation away - Err(AniseError::IntegrityError( - IntegrityErrorKind::ChecksumInvalid { - expected: self.crc32_checksum, - computed: self.crc32(), - }, - )) + Err(IntegrityError::ChecksumInvalid { + expected: self.crc32_checksum, + computed: self.crc32(), + }) } } /// Parse the DAF only if the CRC32 checksum of the data is valid pub fn check_then_parse>( bytes: B, - expected_crc32: u32, - ) -> Result { - let computed_crc32 = crc32fast::hash(&bytes); - if computed_crc32 == expected_crc32 { - Self::parse(bytes) - } else { - Err(AniseError::IntegrityError( - IntegrityErrorKind::ChecksumInvalid { - expected: expected_crc32, - computed: computed_crc32, - }, - )) + expected: u32, + ) -> Result { + let computed = crc32fast::hash(&bytes); + if computed != expected { + return Err(DAFError::DAFIntegrity { + source: IntegrityError::ChecksumInvalid { expected, computed }, + }); } - } - pub fn load>(path: P) -> Result { - Self::parse(file2heap!(path)?) + Self::parse(bytes) } - pub fn from_static>(bytes: &'static B) -> Result { - let crc32_checksum = crc32fast::hash(bytes); - let file_record = FileRecord::read_from(&bytes[..FileRecord::SIZE]).unwrap(); - // Check that the endian-ness is compatible with this platform. - file_record.endianness()?; + pub fn load + Debug>(path: P) -> Result { + match File::open(&path) { + Err(source) => Err(DAFError::IO { + action: format!("loading {path:?}"), + source, + }), + Ok(file) => unsafe { + use memmap2::MmapOptions; + match MmapOptions::new().map(&file) { + Err(source) => Err(DAFError::IO { + action: format!("mmap of {path:?}"), + source, + }), + Ok(mmap) => { + let bytes = Bytes::copy_from_slice(&mmap); + Self::parse(bytes) + } + } + }, + } + } - // Move onto the next record. - let rcrd_idx = file_record.fwrd_idx() * RCRD_LEN; - let rcrd_bytes = bytes - .get(rcrd_idx..rcrd_idx + RCRD_LEN) - .ok_or_else(|| AniseError::MalformedData(file_record.fwrd_idx() + RCRD_LEN))?; - let name_record = NameRecord::read_from(rcrd_bytes).unwrap(); + /// Parse the provided static byte array as a SPICE Double Array File + pub fn from_static>(bytes: &'static B) -> Result { + Self::parse(Bytes::from_static(bytes)) + } - Ok(Self { - file_record, - name_record, - bytes: Bytes::from_static(bytes), + /// Parse the provided bytes as a SPICE Double Array File + pub fn parse>(bytes: B) -> Result { + let crc32_checksum = crc32fast::hash(&bytes); + let me = Self { + bytes: Bytes::copy_from_slice(&bytes), crc32_checksum, _daf_type: PhantomData, - }) + }; + // Check that these calls will succeed. 
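+        // Both records are re-read from `bytes` on demand, so validating them
+        // once here surfaces a corrupt or truncated file at load time rather
+        // than at the first query.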
+ me.file_record()?; + me.name_record()?; + Ok(me) } - /// Parse the provided bytes as a SPICE Double Array File - pub fn parse>(bytes: B) -> Result { - let crc32_checksum = crc32fast::hash(&bytes); - let file_record = FileRecord::read_from(&bytes[..FileRecord::SIZE]).unwrap(); + pub fn file_record(&self) -> Result { + let file_record = FileRecord::read_from(&self.bytes[..FileRecord::SIZE]).unwrap(); // Check that the endian-ness is compatible with this platform. - file_record.endianness()?; + file_record + .endianness() + .with_context(|_| FileRecordSnafu { kind: R::NAME })?; + Ok(file_record) + } - // Move onto the next record. - let rcrd_idx = file_record.fwrd_idx() * RCRD_LEN; - let rcrd_bytes = bytes + pub fn name_record(&self) -> Result { + let rcrd_idx = self.file_record()?.fwrd_idx() * RCRD_LEN; + let rcrd_bytes = self + .bytes .get(rcrd_idx..rcrd_idx + RCRD_LEN) - .ok_or_else(|| AniseError::MalformedData(file_record.fwrd_idx() + RCRD_LEN))?; - let name_record = NameRecord::read_from(rcrd_bytes).unwrap(); - - Ok(Self { - file_record, - name_record, - bytes: Bytes::copy_from_slice(&bytes), - crc32_checksum, - _daf_type: PhantomData, - }) + .ok_or_else(|| DecodingError::InaccessibleBytes { + start: rcrd_idx, + end: rcrd_idx + RCRD_LEN, + size: self.bytes.len(), + }) + .with_context(|_| DecodingNameSnafu { kind: R::NAME })?; + Ok(NameRecord::read_from(rcrd_bytes).unwrap()) } - pub fn daf_summary(&self) -> Result { - let rcrd_idx = (self.file_record.fwrd_idx() - 1) * RCRD_LEN; + pub fn daf_summary(&self) -> Result { + let rcrd_idx = (self.file_record()?.fwrd_idx() - 1) * RCRD_LEN; let rcrd_bytes = self .bytes .get(rcrd_idx..rcrd_idx + RCRD_LEN) - .ok_or_else(|| AniseError::MalformedData(self.file_record.fwrd_idx() + RCRD_LEN))?; + .ok_or_else(|| DecodingError::InaccessibleBytes { + start: rcrd_idx, + end: rcrd_idx + RCRD_LEN, + size: self.bytes.len(), + }) + .with_context(|_| DecodingSummarySnafu { kind: R::NAME })?; SummaryRecord::read_from(&rcrd_bytes[..SummaryRecord::SIZE]) - .ok_or(AniseError::MalformedData(SummaryRecord::SIZE)) + .ok_or(DecodingError::Casting) + .with_context(|_| DecodingSummarySnafu { kind: R::NAME }) } /// Parses the data summaries on the fly. - pub fn data_summaries(&self) -> Result<&[R], AniseError> { - if self.file_record.is_empty() { - return Err(AniseError::MalformedData(0)); + pub fn data_summaries(&self) -> Result<&[R], DAFError> { + if self.file_record()?.is_empty() { + return Err(DAFError::FileRecord { + kind: R::NAME, + source: FileRecordError::EmptyRecord, + }); } + // Move onto the next record, DAF indexes start at 1 ... 
=( - let rcrd_idx = (self.file_record.fwrd_idx() - 1) * RCRD_LEN; - let rcrd_bytes = self + let rcrd_idx = (self.file_record()?.fwrd_idx() - 1) * RCRD_LEN; + let rcrd_bytes = match self .bytes .get(rcrd_idx..rcrd_idx + RCRD_LEN) - .ok_or_else(|| AniseError::MalformedData(self.file_record.fwrd_idx() + RCRD_LEN))?; + .ok_or_else(|| DecodingError::InaccessibleBytes { + start: rcrd_idx, + end: rcrd_idx + RCRD_LEN, + size: self.bytes.len(), + }) { + Ok(it) => it, + Err(source) => { + return Err(DAFError::DecodingSummary { + kind: R::NAME, + source, + }) + } + }; // The summaries are defined in the same record as the DAF summary Ok(match Ref::new_slice(&rcrd_bytes[SummaryRecord::SIZE..]) { @@ -163,23 +200,13 @@ impl DAF { }) } - pub fn nth_summary(&self, n: usize) -> Result<(&str, &R), AniseError> { - let name = self - .name_record - .nth_name(n, self.file_record.summary_size()); - - let summary = &self.data_summaries()?[n]; - - Ok((name.trim(), summary)) - } - /// Returns the summary given the name of the summary record - pub fn summary_from_name(&self, name: &str) -> Result<(&R, usize), AniseError> { + pub fn summary_from_name(&self, name: &str) -> Result<(&R, usize), DAFError> { let idx = self - .name_record - .index_from_name(name, self.file_record.summary_size())?; + .name_record()? + .index_from_name::(name, self.file_record()?.summary_size())?; - Ok((self.nth_summary(idx)?.1, idx)) + Ok((&self.data_summaries()?[idx], idx)) } /// Returns the summary given the name of the summary record if that summary has data defined at the requested epoch @@ -187,34 +214,34 @@ impl DAF { &self, name: &str, epoch: Epoch, - ) -> Result<(&R, usize), AniseError> { + ) -> Result<(&R, usize), DAFError> { let (summary, idx) = self.summary_from_name(name)?; if epoch >= summary.start_epoch() && epoch <= summary.end_epoch() { Ok((summary, idx)) } else { error!("No summary {name} valid at epoch {epoch}"); - Err(AniseError::MissingInterpolationData(epoch)) + Err(DAFError::SummaryNameAtEpochError { + kind: R::NAME, + name: name.to_string(), + epoch, + }) } } - /// Returns the summary given the name of the summary record - pub fn summary_from_id(&self, id: i32) -> Result<(&R, usize), AniseError> { + /// Returns the summary given the id of the summary record + pub fn summary_from_id(&self, id: i32) -> Result<(&R, usize), DAFError> { for (idx, summary) in self.data_summaries()?.iter().enumerate() { if summary.id() == id { return Ok((summary, idx)); } } - Err(AniseError::ItemNotFound) + Err(DAFError::SummaryIdError { kind: R::NAME, id }) } /// Returns the summary given the name of the summary record if that summary has data defined at the requested epoch - pub fn summary_from_id_at_epoch( - &self, - id: i32, - epoch: Epoch, - ) -> Result<(&R, usize), AniseError> { + pub fn summary_from_id_at_epoch(&self, id: i32, epoch: Epoch) -> Result<(&R, usize), DAFError> { // NOTE: We iterate through the whole summary because a specific NAIF ID may be repeated in the summary for different valid epochs // so we can't just call `summary_from_id`. for (idx, summary) in self.data_summaries()?.iter().enumerate() { @@ -233,61 +260,94 @@ impl DAF { } } } - Err(AniseError::MissingInterpolationData(epoch)) + Err(DAFError::InterpolationDataErrorFromId { + kind: R::NAME, + id, + epoch, + }) } /// Provided a name that is in the summary, return its full data, if name is available. 
- pub fn data_from_name<'a, S: NAIFDataSet<'a>>(&'a self, name: &str) -> Result { + pub fn data_from_name<'a, S: NAIFDataSet<'a>>(&'a self, name: &str) -> Result { // O(N) search through the summaries - for idx in 0..self - .name_record - .num_entries(self.file_record.summary_size()) - { - let (this_name, _) = self.nth_summary(idx)?; + let name_rcrd = self.name_record()?; + for idx in 0..name_rcrd.num_entries(self.file_record()?.summary_size()) { + let this_name = name_rcrd.nth_name(idx, self.file_record()?.summary_size()); + if name.trim() == this_name.trim() { // Found it! return self.nth_data(idx); } } - Err(AniseError::DAFParserError(format!( - "Could not find data for {name}" - ))) + Err(DAFError::NameError { + kind: R::NAME, + name: name.to_string(), + }) } /// Provided a name that is in the summary, return its full data, if name is available. - pub fn nth_data<'a, S: NAIFDataSet<'a>>(&'a self, idx: usize) -> Result { - let (_, this_summary) = self.nth_summary(idx)?; + pub fn nth_data<'a, S: NAIFDataSet<'a>>(&'a self, idx: usize) -> Result { + let this_summary = &self.data_summaries()?[idx]; // Grab the data in native endianness (TODO: How to support both big and little endian?) trace!("{idx} -> {this_summary:?}"); - if this_summary.is_empty() { - return Err(AniseError::InternalError( - crate::errors::InternalErrorKind::Generic, - )); + if self.file_record()?.is_empty() { + return Err(DAFError::FileRecord { + kind: R::NAME, + source: FileRecordError::EmptyRecord, + }); } + + let start = (this_summary.start_index() - 1) * DBL_SIZE; + let end = this_summary.end_index() * DBL_SIZE; let data: &[f64] = Ref::new_slice( - self.bytes - .get( - (this_summary.start_index() - 1) * DBL_SIZE - ..this_summary.end_index() * DBL_SIZE, - ) - .ok_or_else(|| AniseError::MalformedData(this_summary.end_index() + RCRD_LEN))?, + match self + .bytes + .get(start..end) + .ok_or_else(|| DecodingError::InaccessibleBytes { + start, + end, + size: self.bytes.len(), + }) { + Ok(it) => it, + Err(source) => { + return Err(DAFError::DecodingData { + kind: R::NAME, + idx, + source, + }) + } + }, ) .unwrap() .into_slice(); // Convert it - S::from_slice_f64(data) + S::from_slice_f64(data).with_context(|_| DecodingDataSnafu { kind: R::NAME, idx }) + // S::from_slice_f64(data) } - pub fn comments(&self) -> Result, AniseError> { + pub fn comments(&self) -> Result, DAFError> { // TODO: This can be cleaned up to avoid allocating a string. In my initial tests there were a bunch of additional spaces, so I canceled those changes. let mut rslt = String::new(); // FWRD has the initial record of the summary. 
So we assume that all records between the second record and that one are comments - for rid in 1..self.file_record.fwrd_idx() { + for rid in 1..self.file_record()?.fwrd_idx() { match core::str::from_utf8( - self.bytes + match self + .bytes .get(rid * RCRD_LEN..(rid + 1) * RCRD_LEN) - .ok_or(AniseError::MalformedData((rid + 1) * RCRD_LEN))?, + .ok_or_else(|| DecodingError::InaccessibleBytes { + start: rid * RCRD_LEN, + end: (rid + 1) * RCRD_LEN, + size: self.bytes.len(), + }) { + Ok(it) => it, + Err(source) => { + return Err(DAFError::DecodingComments { + kind: R::NAME, + source, + }) + } + }, ) { Ok(s) => rslt += s.replace('\u{0}', "\n").trim(), Err(e) => { @@ -312,10 +372,10 @@ impl DAF { pub fn persist>(&self, path: P) -> IoResult<()> { let mut fs = File::create(path)?; - let mut file_rcrd = Vec::from(self.file_record.as_bytes()); + let mut file_rcrd = Vec::from(self.file_record().unwrap().as_bytes()); file_rcrd.extend(vec![ 0x0; - (self.file_record.fwrd_idx() - 1) * RCRD_LEN + (self.file_record().unwrap().fwrd_idx() - 1) * RCRD_LEN - file_rcrd.len() ]); fs.write_all(&file_rcrd)?; @@ -329,11 +389,11 @@ impl DAF { daf_summary.extend(vec![0x0; RCRD_LEN - daf_summary.len()]); fs.write_all(&daf_summary)?; - let mut name_rcrd = Vec::from(self.name_record.as_bytes()); + let mut name_rcrd = Vec::from(self.name_record().unwrap().as_bytes()); name_rcrd.extend(vec![0x0; RCRD_LEN - name_rcrd.len()]); fs.write_all(&name_rcrd)?; - fs.write_all(&self.bytes[self.file_record.fwrd_idx() * (2 * RCRD_LEN)..]) + fs.write_all(&self.bytes[self.file_record().unwrap().fwrd_idx() * (2 * RCRD_LEN)..]) } } @@ -343,3 +403,78 @@ impl Hash for DAF { self.bytes.hash(state); } } + +#[cfg(test)] +mod daf_ut { + use hifitime::Epoch; + + use crate::{ + errors::{InputOutputError, IntegrityError}, + file2heap, + naif::{daf::DAFError, spk::datatypes::HermiteSetType13, SPK}, + }; + + use std::fs::File; + + #[test] + fn crc32_errors() { + let mut traj = SPK::load("./data/gmat-hermite.bsp").unwrap(); + let nominal_crc = traj.crc32(); + + assert_eq!( + SPK::check_then_parse( + file2heap!("./data/gmat-hermite.bsp").unwrap(), + nominal_crc + 1 + ), + Err(DAFError::DAFIntegrity { + source: IntegrityError::ChecksumInvalid { + expected: nominal_crc + 1, + computed: nominal_crc + }, + }) + ); + + // Change the checksum of the traj and check that scrub fails + traj.crc32_checksum += 1; + assert_eq!( + traj.scrub(), + Err(IntegrityError::ChecksumInvalid { + expected: nominal_crc + 1, + computed: nominal_crc + }) + ); + } + + #[test] + fn summary_from_name() { + let epoch = Epoch::now().unwrap(); + let traj = SPK::load("./data/gmat-hermite.bsp").unwrap(); + + assert_eq!( + traj.summary_from_name_at_epoch("name", epoch), + Err(DAFError::NameError { + kind: "SPKSummaryRecord", + name: "name".to_string() + }) + ); + + // SPK_SEGMENT + + assert_eq!( + traj.summary_from_name_at_epoch("SPK_SEGMENT", epoch), + Err(DAFError::SummaryNameAtEpochError { + kind: "SPKSummaryRecord", + name: "SPK_SEGMENT".to_string(), + epoch + }) + ); + + if traj.nth_data::(0).unwrap() + != traj.data_from_name("SPK_SEGMENT").unwrap() + { + // We cannot user assert_eq! because the NAIF Data Set do not (and should not) impl Debug + // These data sets are the full record! 
+ panic!("nth data test failed"); + } + } +} diff --git a/src/naif/daf/file_record.rs b/src/naif/daf/file_record.rs index 547ec1b5..2a1140b2 100644 --- a/src/naif/daf/file_record.rs +++ b/src/naif/daf/file_record.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -8,13 +8,37 @@ * Documentation: https://nyxspace.com/ */ +use std::str::Utf8Error; + +use snafu::prelude::*; use zerocopy::{AsBytes, FromBytes, FromZeroes}; -use crate::{naif::Endian, prelude::AniseError}; +use crate::naif::Endian; use log::error; use super::NAIFRecord; +#[derive(Debug, Snafu, PartialEq)] +#[snafu(visibility(pub(crate)))] +pub enum FileRecordError { + /// Endian of file does not match the endian order of the machine + WrongEndian, + /// Could not parse the endian flag or internal filename as a UTF8 string + ParsingError { + source: Utf8Error, + }, + /// Endian flag should be either `BIG-IEEE` or `LTL-IEEE` + InvalidEndian { + read: String, + }, + UnsupportedIdentifier { + loci: String, + }, + NotDAF, + NoIdentifier, + EmptyRecord, +} + #[derive(Debug, Clone, FromBytes, FromZeroes, AsBytes, PartialEq)] #[repr(C)] pub struct FileRecord { @@ -68,60 +92,51 @@ impl FileRecord { (self.nd + (self.ni + 1) / 2) as usize } - pub fn identification(&self) -> Result<&str, AniseError> { - let str_locidw = core::str::from_utf8(&self.id_str).map_err(|_| { - AniseError::DAFParserError("Could not parse identification string".to_owned()) - })?; + pub fn identification(&self) -> Result<&str, FileRecordError> { + let str_locidw = + core::str::from_utf8(&self.id_str).map_err(|_| FileRecordError::NoIdentifier)?; if &str_locidw[0..3] != "DAF" || str_locidw.chars().nth(3) != Some('/') { - Err(AniseError::DAFParserError(format!( - "Cannot parse file whose identifier is not DAF: `{}`", - str_locidw, - ))) + Err(FileRecordError::NotDAF) } else { - match str_locidw[4..].trim() { + let loci = str_locidw[4..].trim(); + match loci { "SPK" => Ok("SPK"), "PCK" => Ok("PCK"), _ => { error!("DAF of type `{}` is not yet supported", &str_locidw[4..]); - Err(AniseError::DAFParserError(format!( - "Cannot parse SPICE data of type `{}`", - str_locidw - ))) + Err(FileRecordError::UnsupportedIdentifier { + loci: loci.to_string(), + }) } } } } - pub fn endianness(&self) -> Result { - let str_endianness = core::str::from_utf8(&self.endian_str) - .map_err(|_| AniseError::DAFParserError("Could not parse endianness".to_owned()))?; + pub fn endianness(&self) -> Result { + let str_endianness = + core::str::from_utf8(&self.endian_str).with_context(|_| ParsingSnafu)?; let file_endian = if str_endianness == "LTL-IEEE" { Endian::Little } else if str_endianness == "BIG-IEEE" { Endian::Big } else { - return Err(AniseError::DAFParserError(format!( - "Could not understand endianness: `{}`", - str_endianness - ))); + return Err(FileRecordError::InvalidEndian { + read: str_endianness.to_string(), + }); }; if file_endian != Endian::f64_native() || file_endian != Endian::u64_native() { - Err(AniseError::DAFParserError( - "Input file has different endian-ness than the platform and cannot be decoded" - .to_string(), - )) + Err(FileRecordError::WrongEndian) } else { Ok(file_endian) } } - pub fn internal_filename(&self) -> Result<&str, AniseError> { - 
match core::str::from_utf8(&self.internal_filename) { - Ok(filename) => Ok(filename.trim()), - Err(e) => Err(AniseError::DAFParserError(format!("{e}"))), - } + pub fn internal_filename(&self) -> Result<&str, FileRecordError> { + Ok(core::str::from_utf8(&self.internal_filename) + .with_context(|_| ParsingSnafu)? + .trim()) } /// Returns whether this record was just null bytes diff --git a/src/naif/daf/mod.rs b/src/naif/daf/mod.rs index 4d58dfb8..e60cc89d 100644 --- a/src/naif/daf/mod.rs +++ b/src/naif/daf/mod.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -8,8 +8,11 @@ * Documentation: https://nyxspace.com/ */ +use crate::{errors::IntegrityError, math::interpolation::InterpolationError, NaifId}; use core::fmt::Display; use hifitime::Epoch; +use snafu::prelude::*; +use std::io::Error as IOError; use zerocopy::{AsBytes, FromBytes}; pub(crate) const RCRD_LEN: usize = 1024; @@ -21,12 +24,14 @@ pub mod summary_record; pub use daf::DAF; -use crate::prelude::AniseError; +use crate::errors::DecodingError; use core::fmt::Debug; pub use file_record::FileRecord; pub use name_record::NameRecord; pub use summary_record::SummaryRecord; +use self::file_record::FileRecordError; + pub trait NAIFRecord: AsBytes + FromBytes + Sized + Default + Debug { const SIZE: usize = core::mem::size_of::(); } @@ -47,9 +52,11 @@ pub trait NAIFSummaryRecord: NAIFRecord + Copy { fn is_empty(&self) -> bool { self.start_index() == self.end_index() } + /// Name of this NAIF type + const NAME: &'static str; } -pub trait NAIFDataSet<'a>: Sized + Display { +pub trait NAIFDataSet<'a>: Sized + Display + PartialEq { /// The underlying record representation type RecordKind: NAIFDataRecord<'a>; @@ -59,21 +66,276 @@ pub trait NAIFDataSet<'a>: Sized + Display { /// The state that is returned from an evaluation of this data set type StateKind; + /// The name of this data set, used in errors + const DATASET_NAME: &'static str; + /// Builds this dataset given a slice of f64 data - fn from_slice_f64(slice: &'a [f64]) -> Result; + fn from_slice_f64(slice: &'a [f64]) -> Result; - fn nth_record(&self, n: usize) -> Result; + fn nth_record(&self, n: usize) -> Result; fn evaluate( &self, epoch: Epoch, summary: &Self::SummaryKind, - ) -> Result; + ) -> Result; /// Checks the integrity of this data set, returns an error if the data has issues. - fn check_integrity(&self) -> Result<(), AniseError>; + fn check_integrity(&self) -> Result<(), IntegrityError>; } pub trait NAIFDataRecord<'a>: Display { fn from_slice_f64(slice: &'a [f64]) -> Self; } + +/// Errors associated with handling NAIF DAF files +#[derive(Debug, Snafu)] +#[snafu(visibility(pub(crate)))] +pub enum DAFError { + #[snafu(display("No DAF/{kind} data have been loaded"))] + NoDAFLoaded { kind: &'static str }, + /// While searching for the root of the loaded ephemeris tree, we're recursed more times than allowed. 
+ MaxRecursionDepth, + #[snafu(display("DAF/{kind}: summary {id} not present"))] + SummaryIdError { kind: &'static str, id: NaifId }, + #[snafu(display( + "DAF/{kind}: summary {id} not present or does not cover requested epoch of {epoch}" + ))] + SummaryIdAtEpochError { + kind: &'static str, + id: NaifId, + epoch: Epoch, + }, + #[snafu(display("DAF/{kind}: summary `{name}` not present"))] + SummaryNameError { kind: &'static str, name: String }, + #[snafu(display( + "DAF/{kind}: summary `{name}` not present or does not cover requested epoch of {epoch}" + ))] + SummaryNameAtEpochError { + kind: &'static str, + name: String, + epoch: Epoch, + }, + #[snafu(display("DAF/{kind}: no interpolation data for `{name}` at {epoch}"))] + InterpolationDataErrorFromName { + kind: &'static str, + name: String, + epoch: Epoch, + }, + #[snafu(display("DAF/{kind}: no interpolation data for {id} at {epoch}"))] + InterpolationDataErrorFromId { + kind: &'static str, + id: NaifId, + epoch: Epoch, + }, + #[snafu(display( + "DAF/{kind}: file record is empty (ensure file is valid, e.g. do you need to run git-lfs)" + ))] + FileRecord { + kind: &'static str, + #[snafu(backtrace)] + source: FileRecordError, + }, + #[snafu(display( + "DAF/{kind}: summary contains no data (start and end index both set to {idx})" + ))] + EmptySummary { kind: &'static str, idx: usize }, + #[snafu(display("DAF/{kind}: no data record for `{name}`"))] + NameError { kind: &'static str, name: String }, + #[snafu(display("DAF/{kind}: summary: {source}"))] + DecodingSummary { + kind: &'static str, + #[snafu(backtrace)] + source: DecodingError, + }, + #[snafu(display("DAF/{kind}: comments: {source}"))] + DecodingComments { + kind: &'static str, + #[snafu(backtrace)] + source: DecodingError, + }, + #[snafu(display("DAF/{kind}: name: {source}"))] + DecodingName { + kind: &'static str, + #[snafu(backtrace)] + source: DecodingError, + }, + #[snafu(display("DAF/{kind}: data index {idx}: {source}"))] + DecodingData { + kind: &'static str, + idx: usize, + #[snafu(backtrace)] + source: DecodingError, + }, + DAFIntegrity { + #[snafu(backtrace)] + source: IntegrityError, + }, + #[snafu(display("while {action} encountered input/output error {source}"))] + IO { action: String, source: IOError }, +} + +// Manual implementation of PartialEq because IOError does not derive it, sadly. 
+impl PartialEq for DAFError { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::NoDAFLoaded { kind: l_kind }, Self::NoDAFLoaded { kind: r_kind }) => { + l_kind == r_kind + } + ( + Self::SummaryIdError { + kind: l_kind, + id: l_id, + }, + Self::SummaryIdError { + kind: r_kind, + id: r_id, + }, + ) => l_kind == r_kind && l_id == r_id, + ( + Self::SummaryIdAtEpochError { + kind: l_kind, + id: l_id, + epoch: l_epoch, + }, + Self::SummaryIdAtEpochError { + kind: r_kind, + id: r_id, + epoch: r_epoch, + }, + ) => l_kind == r_kind && l_id == r_id && l_epoch == r_epoch, + ( + Self::SummaryNameError { + kind: l_kind, + name: l_name, + }, + Self::SummaryNameError { + kind: r_kind, + name: r_name, + }, + ) => l_kind == r_kind && l_name == r_name, + ( + Self::SummaryNameAtEpochError { + kind: l_kind, + name: l_name, + epoch: l_epoch, + }, + Self::SummaryNameAtEpochError { + kind: r_kind, + name: r_name, + epoch: r_epoch, + }, + ) => l_kind == r_kind && l_name == r_name && l_epoch == r_epoch, + ( + Self::InterpolationDataErrorFromName { + kind: l_kind, + name: l_name, + epoch: l_epoch, + }, + Self::InterpolationDataErrorFromName { + kind: r_kind, + name: r_name, + epoch: r_epoch, + }, + ) => l_kind == r_kind && l_name == r_name && l_epoch == r_epoch, + ( + Self::InterpolationDataErrorFromId { + kind: l_kind, + id: l_id, + epoch: l_epoch, + }, + Self::InterpolationDataErrorFromId { + kind: r_kind, + id: r_id, + epoch: r_epoch, + }, + ) => l_kind == r_kind && l_id == r_id && l_epoch == r_epoch, + ( + Self::FileRecord { + kind: l_kind, + source: l_source, + }, + Self::FileRecord { + kind: r_kind, + source: r_source, + }, + ) => l_kind == r_kind && l_source == r_source, + ( + Self::EmptySummary { + kind: l_kind, + idx: l_idx, + }, + Self::EmptySummary { + kind: r_kind, + idx: r_idx, + }, + ) => l_kind == r_kind && l_idx == r_idx, + ( + Self::NameError { + kind: l_kind, + name: l_name, + }, + Self::NameError { + kind: r_kind, + name: r_name, + }, + ) => l_kind == r_kind && l_name == r_name, + ( + Self::DecodingSummary { + kind: l_kind, + source: l_source, + }, + Self::DecodingSummary { + kind: r_kind, + source: r_source, + }, + ) => l_kind == r_kind && l_source == r_source, + ( + Self::DecodingComments { + kind: l_kind, + source: l_source, + }, + Self::DecodingComments { + kind: r_kind, + source: r_source, + }, + ) => l_kind == r_kind && l_source == r_source, + ( + Self::DecodingName { + kind: l_kind, + source: l_source, + }, + Self::DecodingName { + kind: r_kind, + source: r_source, + }, + ) => l_kind == r_kind && l_source == r_source, + ( + Self::DecodingData { + kind: l_kind, + idx: l_idx, + source: l_source, + }, + Self::DecodingData { + kind: r_kind, + idx: r_idx, + source: r_source, + }, + ) => l_kind == r_kind && l_idx == r_idx && l_source == r_source, + (Self::DAFIntegrity { source: l_source }, Self::DAFIntegrity { source: r_source }) => { + l_source == r_source + } + ( + Self::IO { + action: l_action, + source: _l_source, + }, + Self::IO { + action: r_action, + source: _r_source, + }, + ) => l_action == r_action, + _ => core::mem::discriminant(self) == core::mem::discriminant(other), + } + } +} diff --git a/src/naif/daf/name_record.rs b/src/naif/daf/name_record.rs index ea500bda..75326d48 100644 --- a/src/naif/daf/name_record.rs +++ b/src/naif/daf/name_record.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. 
AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -10,10 +10,10 @@ use zerocopy::{AsBytes, FromBytes, FromZeroes}; -use crate::{prelude::AniseError, DBL_SIZE}; +use crate::DBL_SIZE; use log::warn; -use super::{NAIFRecord, RCRD_LEN}; +use super::{DAFError, NAIFRecord, NAIFSummaryRecord, RCRD_LEN}; #[derive(AsBytes, Clone, Debug, FromZeroes, FromBytes)] #[repr(C)] @@ -32,9 +32,11 @@ impl Default for NameRecord { impl NAIFRecord for NameRecord {} impl NameRecord { - /// Returns the number of names in this record - pub fn num_entries(&self, summary_size: usize) -> usize { - self.raw_names.len() / summary_size * DBL_SIZE + /// Returns the maximum number of names in this record given the provided summary size. + /// + /// Note that we don't actually use `&self` here, but it's just easier to call. + pub const fn num_entries(&self, summary_size: usize) -> usize { + RCRD_LEN / (summary_size * DBL_SIZE) } pub fn nth_name(&self, n: usize, summary_size: usize) -> &str { @@ -83,12 +85,19 @@ impl NameRecord { /// Searches the name record for the provided name. /// /// **Warning:** this performs an O(N) search! - pub fn index_from_name(&self, name: &str, summary_size: usize) -> Result { + pub fn index_from_name( + &self, + name: &str, + summary_size: usize, + ) -> Result { for i in 0..self.num_entries(summary_size) { if self.nth_name(i, summary_size) == name { return Ok(i); } } - Err(AniseError::ItemNotFound) + Err(DAFError::NameError { + kind: R::NAME, + name: name.to_string(), + }) } } diff --git a/src/naif/daf/summary_record.rs b/src/naif/daf/summary_record.rs index d3c292a2..cff48616 100644 --- a/src/naif/daf/summary_record.rs +++ b/src/naif/daf/summary_record.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. diff --git a/src/naif/kpl/fk.rs b/src/naif/kpl/fk.rs index 2bf34c0a..f4bc903d 100644 --- a/src/naif/kpl/fk.rs +++ b/src/naif/kpl/fk.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. diff --git a/src/naif/kpl/mod.rs b/src/naif/kpl/mod.rs index eb51e37a..850315e5 100644 --- a/src/naif/kpl/mod.rs +++ b/src/naif/kpl/mod.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
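The corrected `num_entries` above divides the fixed 1024-byte record length by the byte size of one summary. A quick sanity check of that formula, assuming the usual SPK summary layout (ND = 2, NI = 6, i.e. five doubles per summary); the ND/NI values are an assumption of this sketch, not taken from the diff:

// Sanity check of `RCRD_LEN / (summary_size * DBL_SIZE)` for SPK summaries.
const RCRD_LEN: usize = 1024; // one DAF record, in bytes
const DBL_SIZE: usize = 8; // size of an f64, in bytes

fn main() {
    let (nd, ni) = (2_usize, 6_usize);
    // Same computation as FileRecord::summary_size(): nd + (ni + 1) / 2 doubles.
    let summary_size = nd + (ni + 1) / 2; // = 5 doubles = 40 bytes
    // At most 25 names (and summaries) fit in a single 1024-byte record.
    assert_eq!(RCRD_LEN / (summary_size * DBL_SIZE), 25);
}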
@@ -12,7 +12,7 @@ use core::str::FromStr; use std::fmt::Debug; use std::{collections::HashMap, hash::Hash}; -use crate::prelude::AniseError; +use snafu::{whatever, Whatever}; use self::parser::Assignment; @@ -38,10 +38,10 @@ pub enum KPLValue { } impl KPLValue { - pub fn to_vec_f64(&self) -> Result, AniseError> { + pub fn to_vec_f64(&self) -> Result, Whatever> { match self { KPLValue::Matrix(data) => Ok(data.clone()), - _ => Err(AniseError::ParameterNotSpecified), + _ => whatever!("can only convert matrices to vec of f64"), } } } @@ -90,7 +90,7 @@ pub enum Parameter { } impl FromStr for Parameter { - type Err = AniseError; + type Err = Whatever; fn from_str(s: &str) -> Result { match s { @@ -115,12 +115,10 @@ impl FromStr for Parameter { "UNITS" => Ok(Self::Units), "AXES" => Ok(Self::Axes), "GMLIST" | "NAME" | "SPEC" => { - // This is a known unsupported parameter - Err(AniseError::ParameterNotSpecified) + whatever!("unsupported parameter `{s}`") } _ => { - println!("WHAT IS `{s}` ?"); - Err(AniseError::ParameterNotSpecified) + whatever!("unknown parameter `{s}`") } } } diff --git a/src/naif/kpl/parser.rs b/src/naif/kpl/parser.rs index 4bb4c0c8..aef4079e 100644 --- a/src/naif/kpl/parser.rs +++ b/src/naif/kpl/parser.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -17,10 +17,10 @@ use std::path::Path; use log::{error, info, warn}; +use crate::almanac::MAX_PLANETARY_DATA; use crate::naif::kpl::tpc::TPCItem; use crate::naif::kpl::Parameter; -use crate::prelude::AniseError; -use crate::structure::dataset::{DataSet, DataSetBuilder, DataSetType}; +use crate::structure::dataset::{DataSet, DataSetBuilder, DataSetError, DataSetType}; use crate::structure::metadata::Metadata; use crate::structure::planetocentric::ellipsoid::Ellipsoid; use crate::structure::planetocentric::phaseangle::PhaseAngle; @@ -78,7 +78,7 @@ impl Assignment { pub fn parse_file, I: KPLItem>( file_path: P, show_comments: bool, -) -> Result, AniseError> { +) -> Result, DataSetError> { let file = File::open(file_path).expect("Failed to open file"); let reader = BufReader::new(file); @@ -135,7 +135,7 @@ pub fn parse_file, I: KPLItem>( pub fn convert_tpc<'a, P: AsRef>( pck: P, gm: P, -) -> Result, AniseError> { +) -> Result, DataSetError> { let mut buf = vec![]; let mut dataset_builder = DataSetBuilder::default(); @@ -232,7 +232,7 @@ pub fn convert_tpc<'a, P: AsRef>( println!("Added {} items", dataset_builder.dataset.lut.by_id.len()); - let mut dataset = dataset_builder.dataset; + let mut dataset = dataset_builder.finalize(buf)?; dataset.metadata = Metadata::default(); dataset.metadata.dataset_type = DataSetType::PlanetaryData; diff --git a/src/naif/kpl/tpc.rs b/src/naif/kpl/tpc.rs index f233ba94..60be287d 100644 --- a/src/naif/kpl/tpc.rs +++ b/src/naif/kpl/tpc.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
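The KPL parser now reports unsupported or unknown parameters through snafu's ad-hoc `Whatever` error instead of a dedicated `AniseError` variant. A minimal sketch of that pattern; the `KplValue` enum and `to_f64` helper below are illustrative stand-ins, not the crate's types:

use snafu::{whatever, Whatever};

#[derive(Debug)]
enum KplValue {
    Float(f64),
    Matrix(Vec<f64>),
}

fn to_f64(value: &KplValue) -> Result<f64, Whatever> {
    match value {
        KplValue::Float(f) => Ok(*f),
        // `whatever!` returns early with a one-off, message-only error.
        _ => whatever!("can only convert floats to f64, got {value:?}"),
    }
}

fn main() {
    assert_eq!(to_f64(&KplValue::Float(2.0)).unwrap(), 2.0);
    assert!(to_f64(&KplValue::Matrix(vec![1.0, 2.0])).is_err());
}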
@@ -125,18 +125,18 @@ fn test_parse_gm() { // Basic values testing assert_eq!( assignments[&1].data[&Parameter::GravitationalParameter], - KPLValue::Float(2.2031780000000021E+04) + KPLValue::Float(2.203_178_000_000_002E4) ); assert_eq!( assignments[&399].data[&Parameter::GravitationalParameter], - KPLValue::Float(3.9860043543609598E+05) + KPLValue::Float(3.986_004_354_360_96E5) ); } #[test] fn test_anise_conversion() { - use crate::errors::AniseError; + use crate::errors::InputOutputError; use crate::naif::kpl::parser::convert_tpc; use crate::{file2heap, file_mmap, structure::dataset::DataSet}; use std::fs::File; diff --git a/src/naif/mod.rs b/src/naif/mod.rs index 33e35761..7464e6aa 100644 --- a/src/naif/mod.rs +++ b/src/naif/mod.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. diff --git a/src/naif/pck/mod.rs b/src/naif/pck/mod.rs index fdf8ad95..1994e6ff 100644 --- a/src/naif/pck/mod.rs +++ b/src/naif/pck/mod.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -28,6 +28,8 @@ pub struct BPCSummaryRecord { impl NAIFRecord for BPCSummaryRecord {} impl NAIFSummaryRecord for BPCSummaryRecord { + const NAME: &'static str = "BPCSummaryRecord"; + fn start_index(&self) -> usize { self.start_idx as usize } diff --git a/src/naif/spk/datatypes/chebyshev.rs b/src/naif/spk/datatypes/chebyshev.rs index 307f264a..3ef79e22 100644 --- a/src/naif/spk/datatypes/chebyshev.rs +++ b/src/naif/spk/datatypes/chebyshev.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
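Each summary-record type now exposes a `NAME` constant (e.g. the one added to `BPCSummaryRecord` earlier in this diff), which the DAF errors use to report which kind of DAF failed. A small illustrative sketch of the pattern; the trait and struct names here are stand-ins, not the crate's exact definitions:

// Each summary kind advertises a human-readable name via an associated const.
trait SummaryKind {
    const NAME: &'static str;
}

struct SpkSummary;
impl SummaryKind for SpkSummary {
    const NAME: &'static str = "SPKSummaryRecord";
}

// Error helpers can then be generic over the summary kind.
fn summary_not_found<R: SummaryKind>(id: i32) -> String {
    format!("DAF/{}: summary {id} not present", R::NAME)
}

fn main() {
    assert_eq!(
        summary_not_found::<SpkSummary>(301),
        "DAF/SPKSummaryRecord: summary 301 not present"
    );
}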
@@ -10,18 +10,21 @@ use core::fmt; use hifitime::{Duration, Epoch, TimeUnits}; -use log::error; +use snafu::{ensure, ResultExt}; use crate::{ - errors::IntegrityErrorKind, - math::{interpolation::chebyshev_eval, Vector3}, + errors::{DecodingError, IntegrityError, TooFewDoublesSnafu}, + math::{ + interpolation::{chebyshev_eval, InterpDecodingSnafu, InterpolationError}, + Vector3, + }, naif::{ daf::{NAIFDataRecord, NAIFDataSet, NAIFSummaryRecord}, spk::summary::SPKSummaryRecord, }, - prelude::AniseError, }; +#[derive(PartialEq)] pub struct Type2ChebyshevSet<'a> { pub init_epoch: Epoch, pub interval_length: Duration, @@ -54,26 +57,49 @@ impl<'a> NAIFDataSet<'a> for Type2ChebyshevSet<'a> { type SummaryKind = SPKSummaryRecord; type StateKind = (Vector3, Vector3); type RecordKind = Type2ChebyshevRecord<'a>; - - fn from_slice_f64(slice: &'a [f64]) -> Result { - if slice.len() < 5 { - error!( - "Cannot build a Type 2 Chebyshev set from only {} items", - slice.len() - ); - return Err(AniseError::MalformedData(5)); - } + const DATASET_NAME: &'static str = "Chebyshev Type 2"; + + fn from_slice_f64(slice: &'a [f64]) -> Result { + ensure!( + slice.len() >= 5, + TooFewDoublesSnafu { + dataset: Self::DATASET_NAME, + need: 5_usize, + got: slice.len() + } + ); // For this kind of record, the data is stored at the very end of the dataset let seconds_since_j2000 = slice[slice.len() - 4]; if !seconds_since_j2000.is_finite() { - // The Epoch initialization will fail on subnormal data - return Err(AniseError::IntegrityError(IntegrityErrorKind::SubNormal)); + return Err(DecodingError::Integrity { + source: IntegrityError::SubNormal { + dataset: Self::DATASET_NAME, + variable: "seconds since J2000 ET", + }, + }); } + let start_epoch = Epoch::from_et_seconds(seconds_since_j2000); + let interval_length_s = slice[slice.len() - 3]; if !interval_length_s.is_finite() { - return Err(AniseError::IntegrityError(IntegrityErrorKind::SubNormal)); + return Err(DecodingError::Integrity { + source: IntegrityError::SubNormal { + dataset: Self::DATASET_NAME, + variable: "interval length in seconds", + }, + }); + } else if interval_length_s <= 0.0 { + return Err(DecodingError::Integrity { + source: IntegrityError::InvalidValue { + dataset: Self::DATASET_NAME, + variable: "interval length in seconds", + value: interval_length_s, + reason: "must be strictly greater than zero", + }, + }); } + let interval_length = interval_length_s.seconds(); let rsize = slice[slice.len() - 2] as usize; let num_records = slice[slice.len() - 1] as usize; @@ -87,11 +113,15 @@ impl<'a> NAIFDataSet<'a> for Type2ChebyshevSet<'a> { }) } - fn nth_record(&self, n: usize) -> Result { + fn nth_record(&self, n: usize) -> Result { Ok(Self::RecordKind::from_slice_f64( self.record_data .get(n * self.rsize..(n + 1) * self.rsize) - .ok_or(AniseError::MalformedData((n + 1) * self.rsize))?, + .ok_or(DecodingError::InaccessibleBytes { + start: n * self.rsize, + end: (n + 1) * self.rsize, + size: self.record_data.len(), + })?, )) } @@ -99,10 +129,14 @@ impl<'a> NAIFDataSet<'a> for Type2ChebyshevSet<'a> { &self, epoch: Epoch, summary: &Self::SummaryKind, - ) -> Result<(Vector3, Vector3), AniseError> { - if epoch < summary.start_epoch() { + ) -> Result<(Vector3, Vector3), InterpolationError> { + if epoch < summary.start_epoch() || epoch > summary.end_epoch() { // No need to go any further. 
- return Err(AniseError::MissingInterpolationData(epoch)); + return Err(InterpolationError::NoInterpolationData { + req: epoch, + start: summary.start_epoch(), + end: summary.end_epoch(), + }); } let window_duration_s = self.interval_length.to_seconds(); @@ -138,7 +172,9 @@ impl<'a> NAIFDataSet<'a> for Type2ChebyshevSet<'a> { ((ephem_start_delta_s / window_duration_s) as usize + 1).min(self.num_records); // Now, build the X, Y, Z data from the record data. - let record = self.nth_record(spline_idx - 1)?; + let record = self + .nth_record(spline_idx - 1) + .with_context(|_| InterpDecodingSnafu)?; let normalized_time = (epoch.to_et_seconds() - record.midpoint_et_s) / radius_s; @@ -158,11 +194,14 @@ impl<'a> NAIFDataSet<'a> for Type2ChebyshevSet<'a> { Ok((pos, vel)) } - fn check_integrity(&self) -> Result<(), AniseError> { + fn check_integrity(&self) -> Result<(), IntegrityError> { // Verify that none of the data is invalid once when we load it. for val in self.record_data { if !val.is_finite() { - return Err(AniseError::IntegrityError(IntegrityErrorKind::SubNormal)); + return Err(IntegrityError::SubNormal { + dataset: Self::DATASET_NAME, + variable: "one of the record data", + }); } } @@ -213,6 +252,7 @@ impl<'a> NAIFDataRecord<'a> for Type2ChebyshevRecord<'a> { } } +#[derive(PartialEq)] pub struct Type3ChebyshevRecord<'a> { pub midpoint: Epoch, pub radius: Duration, @@ -256,3 +296,92 @@ impl<'a> NAIFDataRecord<'a> for Type3ChebyshevRecord<'a> { } } } + +#[cfg(test)] +mod chebyshev_ut { + use crate::{ + errors::{DecodingError, IntegrityError}, + naif::daf::NAIFDataSet, + }; + + use super::Type2ChebyshevSet; + + #[test] + fn too_small() { + if Type2ChebyshevSet::from_slice_f64(&[0.1, 0.2, 0.3, 0.4]) + != Err(DecodingError::TooFewDoubles { + dataset: "Chebyshev Type 2", + got: 4, + need: 5, + }) + { + panic!("test failure"); + } + } + + #[test] + fn subnormal() { + match Type2ChebyshevSet::from_slice_f64(&[0.0, f64::INFINITY, 0.0, 0.0, 0.0]) { + Ok(_) => panic!("test failed on invalid init_epoch"), + Err(e) => { + assert_eq!( + e, + DecodingError::Integrity { + source: IntegrityError::SubNormal { + dataset: "Chebyshev Type 2", + variable: "seconds since J2000 ET", + }, + } + ); + } + } + + match Type2ChebyshevSet::from_slice_f64(&[0.0, 0.0, f64::INFINITY, 0.0, 0.0]) { + Ok(_) => panic!("test failed on invalid interval_length"), + Err(e) => { + assert_eq!( + e, + DecodingError::Integrity { + source: IntegrityError::SubNormal { + dataset: "Chebyshev Type 2", + variable: "interval length in seconds", + }, + } + ); + } + } + + match Type2ChebyshevSet::from_slice_f64(&[0.0, 0.0, -1e-16, 0.0, 0.0]) { + Ok(_) => panic!("test failed on invalid interval_length"), + Err(e) => { + assert_eq!( + e, + DecodingError::Integrity { + source: IntegrityError::InvalidValue { + dataset: "Chebyshev Type 2", + variable: "interval length in seconds", + value: -1e-16, + reason: "must be strictly greater than zero" + }, + } + ); + } + } + + // Load a slice whose metadata is OK but the record data is not + let dataset = + Type2ChebyshevSet::from_slice_f64(&[f64::INFINITY, 0.0, 2e-16, 0.0, 0.0]).unwrap(); + match dataset.check_integrity() { + Ok(_) => panic!("test failed on invalid interval_length"), + Err(e) => { + assert_eq!( + e, + IntegrityError::SubNormal { + dataset: "Chebyshev Type 2", + variable: "one of the record data", + }, + ); + } + } + } +} diff --git a/src/naif/spk/datatypes/hermite.rs b/src/naif/spk/datatypes/hermite.rs index 6d75ed8c..5c418da4 100644 --- a/src/naif/spk/datatypes/hermite.rs +++ 
b/src/naif/spk/datatypes/hermite.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -10,20 +10,22 @@ use core::fmt; use hifitime::{Duration, Epoch, TimeUnits}; -use log::error; +use snafu::{ensure, ResultExt}; -use crate::errors::IntegrityErrorKind; -use crate::math::interpolation::{hermite_eval, MAX_SAMPLES}; +use crate::errors::{DecodingError, IntegrityError, TooFewDoublesSnafu}; +use crate::math::interpolation::{ + hermite_eval, InterpDecodingSnafu, InterpolationError, MAX_SAMPLES, +}; use crate::naif::spk::summary::SPKSummaryRecord; use crate::{ math::{cartesian::CartesianState, Vector3}, naif::daf::{NAIFDataRecord, NAIFDataSet, NAIFRecord}, - prelude::AniseError, DBL_SIZE, }; use super::posvel::PositionVelocityRecord; +#[derive(PartialEq)] pub struct HermiteSetType12<'a> { pub first_state_epoch: Epoch, pub step_size: Duration, @@ -50,25 +52,39 @@ impl<'a> NAIFDataSet<'a> for HermiteSetType12<'a> { type SummaryKind = SPKSummaryRecord; type StateKind = CartesianState; type RecordKind = PositionVelocityRecord; - - fn from_slice_f64(slice: &'a [f64]) -> Result { - if slice.len() < 5 { - error!( - "Cannot build a Type 12 Hermite set from only {} items", - slice.len() - ); - return Err(AniseError::MalformedData(5)); - } + const DATASET_NAME: &'static str = "Hermite Type 12"; + + fn from_slice_f64(slice: &'a [f64]) -> Result { + ensure!( + slice.len() >= 5, + TooFewDoublesSnafu { + dataset: Self::DATASET_NAME, + need: 5_usize, + got: slice.len() + } + ); // For this kind of record, the metadata is stored at the very end of the dataset, so we need to read that first. 
let seconds_since_j2000 = slice[slice.len() - 4]; if !seconds_since_j2000.is_finite() { - return Err(AniseError::IntegrityError(IntegrityErrorKind::SubNormal)); + return Err(DecodingError::Integrity { + source: IntegrityError::SubNormal { + dataset: Self::DATASET_NAME, + variable: "seconds since J2000 ET", + }, + }); } + let first_state_epoch = Epoch::from_et_seconds(seconds_since_j2000); let step_size_s = slice[slice.len() - 3]; if !step_size_s.is_finite() { - return Err(AniseError::IntegrityError(IntegrityErrorKind::SubNormal)); + return Err(DecodingError::Integrity { + source: IntegrityError::SubNormal { + dataset: Self::DATASET_NAME, + variable: "step size in seconds", + }, + }); } + let step_size = step_size_s.seconds(); let window_size = slice[slice.len() - 2] as usize; let num_records = slice[slice.len() - 1] as usize; @@ -82,12 +98,16 @@ impl<'a> NAIFDataSet<'a> for HermiteSetType12<'a> { }) } - fn nth_record(&self, n: usize) -> Result { + fn nth_record(&self, n: usize) -> Result { let rcrd_len = self.record_data.len() / self.num_records; Ok(Self::RecordKind::from_slice_f64( self.record_data .get(n * rcrd_len..(n + 1) * rcrd_len) - .ok_or(AniseError::MalformedData((n + 1) * rcrd_len))?, + .ok_or(DecodingError::InaccessibleBytes { + start: n * rcrd_len, + end: (n + 1) * rcrd_len, + size: self.record_data.len(), + })?, )) } @@ -95,14 +115,17 @@ impl<'a> NAIFDataSet<'a> for HermiteSetType12<'a> { &self, _epoch: Epoch, _: &Self::SummaryKind, - ) -> Result { + ) -> Result { todo!("https://github.com/anise-toolkit/anise.rs/issues/14") } - fn check_integrity(&self) -> Result<(), AniseError> { + fn check_integrity(&self) -> Result<(), IntegrityError> { for val in self.record_data { if !val.is_finite() { - return Err(AniseError::IntegrityError(IntegrityErrorKind::SubNormal)); + return Err(IntegrityError::SubNormal { + dataset: Self::DATASET_NAME, + variable: "one of the record data", + }); } } @@ -110,6 +133,7 @@ impl<'a> NAIFDataSet<'a> for HermiteSetType12<'a> { } } +#[derive(PartialEq)] pub struct HermiteSetType13<'a> { /// Number of samples to use to build the interpolation pub samples: usize, @@ -147,26 +171,72 @@ impl<'a> NAIFDataSet<'a> for HermiteSetType13<'a> { type SummaryKind = SPKSummaryRecord; type StateKind = (Vector3, Vector3); type RecordKind = PositionVelocityRecord; + const DATASET_NAME: &'static str = "Hermite Type 13"; + + fn from_slice_f64(slice: &'a [f64]) -> Result { + ensure!( + slice.len() >= 3, + TooFewDoublesSnafu { + dataset: Self::DATASET_NAME, + need: 3_usize, + got: slice.len() + } + ); - fn from_slice_f64(slice: &'a [f64]) -> Result { - if slice.len() < 3 { - error!( - "Cannot build a Type 13 Hermite set from only {} items", - slice.len() - ); - return Err(AniseError::MalformedData(5)); - } // For this kind of record, the metadata is stored at the very end of the dataset - let num_records = slice[slice.len() - 1] as usize; + let num_records_f64 = slice[slice.len() - 1]; + if !num_records_f64.is_finite() { + return Err(DecodingError::Integrity { + source: IntegrityError::InvalidValue { + dataset: Self::DATASET_NAME, + variable: "number of records", + value: num_records_f64, + reason: "must be a finite value", + }, + }); + } + let num_records = num_records_f64 as usize; + // NOTE: The Type 12 and 13 specify that the windows size minus one is stored! 
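+        // e.g. a stored value of 7.0 in that slot means 8 interpolation
+        // samples per window, hence the `+ 1` below.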
- let samples = slice[slice.len() - 2] as usize + 1; + let num_samples_f64 = slice[slice.len() - 2]; + if !num_samples_f64.is_finite() { + return Err(DecodingError::Integrity { + source: IntegrityError::InvalidValue { + dataset: Self::DATASET_NAME, + variable: "number of interpolation samples", + value: num_samples_f64, + reason: "must be a finite value", + }, + }); + } + + let samples = num_samples_f64 as usize + 1; // NOTE: The ::SIZE returns the C representation memory size of this, but we only want the number of doubles. let state_data_end_idx = PositionVelocityRecord::SIZE / DBL_SIZE * num_records; - let state_data = slice.get(0..state_data_end_idx).unwrap(); + let state_data = + slice + .get(0..state_data_end_idx) + .ok_or(DecodingError::InaccessibleBytes { + start: 0, + end: state_data_end_idx, + size: slice.len(), + })?; let epoch_data_end_idx = state_data_end_idx + num_records; - let epoch_data = slice.get(state_data_end_idx..epoch_data_end_idx).unwrap(); + let epoch_data = slice.get(state_data_end_idx..epoch_data_end_idx).ok_or( + DecodingError::InaccessibleBytes { + start: state_data_end_idx, + end: epoch_data_end_idx, + size: slice.len(), + }, + )?; // And the epoch directory is whatever remains minus the metadata - let epoch_registry = slice.get(epoch_data_end_idx..slice.len() - 2).unwrap(); + let epoch_registry = slice.get(epoch_data_end_idx..slice.len() - 2).ok_or( + DecodingError::InaccessibleBytes { + start: epoch_data_end_idx, + end: slice.len() - 2, + size: slice.len(), + }, + )?; Ok(Self { samples, @@ -177,12 +247,16 @@ impl<'a> NAIFDataSet<'a> for HermiteSetType13<'a> { }) } - fn nth_record(&self, n: usize) -> Result { + fn nth_record(&self, n: usize) -> Result { let rcrd_len = self.state_data.len() / self.num_records; Ok(Self::RecordKind::from_slice_f64( self.state_data .get(n * rcrd_len..(n + 1) * rcrd_len) - .ok_or(AniseError::MalformedData((n + 1) * rcrd_len))?, + .ok_or(DecodingError::InaccessibleBytes { + start: n * rcrd_len, + end: (n + 1) * rcrd_len, + size: self.state_data.len(), + })?, )) } @@ -190,14 +264,18 @@ impl<'a> NAIFDataSet<'a> for HermiteSetType13<'a> { &self, epoch: Epoch, _: &Self::SummaryKind, - ) -> Result { + ) -> Result { // Start by doing a binary search on the epoch registry to limit the search space in the total number of epochs. // TODO: use the epoch registry to reduce the search space // Check that we even have interpolation data for that time if epoch.to_et_seconds() < self.epoch_data[0] || epoch.to_et_seconds() > *self.epoch_data.last().unwrap() { - return Err(AniseError::MissingInterpolationData(epoch)); + return Err(InterpolationError::NoInterpolationData { + req: epoch, + start: Epoch::from_et_seconds(self.epoch_data[0]), + end: Epoch::from_et_seconds(*self.epoch_data.last().unwrap()), + }); } // Now, perform a binary search on the epochs themselves. match self.epoch_data.binary_search_by(|epoch_et| { @@ -207,7 +285,10 @@ impl<'a> NAIFDataSet<'a> for HermiteSetType13<'a> { }) { Ok(idx) => { // Oh wow, this state actually exists, no interpolation needed! - Ok(self.nth_record(idx)?.to_pos_vel()) + Ok(self + .nth_record(idx) + .with_context(|_| InterpDecodingSnafu)? + .to_pos_vel()) } Err(idx) => { // We didn't find it, so let's build an interpolation here. 
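The Type 13 evaluation above hinges on `binary_search_by` over the sorted epoch data: an exact hit returns the stored state directly, while a miss yields the insertion index that brackets the request for Hermite interpolation. A self-contained sketch of that lookup with made-up epochs (not data from the diff):

fn main() {
    // Epochs in seconds past J2000 ET, sorted ascending as in an SPK segment.
    let epoch_data = [0.0_f64, 60.0, 120.0, 180.0];
    let req_et_s = 90.0_f64;

    match epoch_data.binary_search_by(|et| et.partial_cmp(&req_et_s).unwrap()) {
        // Exact match: no interpolation needed, return that record's state.
        Ok(idx) => println!("exact state at record {idx}"),
        // Miss: `idx` is the insertion point, so the interpolation window is
        // built around records idx - 1 and idx.
        Err(idx) => {
            assert_eq!(idx, 2);
            println!("interpolate between records {} and {}", idx - 1, idx);
        }
    }
}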
@@ -231,7 +312,7 @@ impl<'a> NAIFDataSet<'a> for HermiteSetType13<'a> { let mut vys = [0.0; MAX_SAMPLES]; let mut vzs = [0.0; MAX_SAMPLES]; for (cno, idx) in (first_idx..last_idx).enumerate() { - let record = self.nth_record(idx)?; + let record = self.nth_record(idx).with_context(|_| InterpDecodingSnafu)?; xs[cno] = record.x_km; ys[cno] = record.y_km; zs[cno] = record.z_km; @@ -275,26 +356,138 @@ impl<'a> NAIFDataSet<'a> for HermiteSetType13<'a> { } } - fn check_integrity(&self) -> Result<(), AniseError> { + fn check_integrity(&self) -> Result<(), IntegrityError> { // Verify that none of the data is invalid once when we load it. for val in self.epoch_data { if !val.is_finite() { - return Err(AniseError::IntegrityError(IntegrityErrorKind::SubNormal)); + return Err(IntegrityError::SubNormal { + dataset: Self::DATASET_NAME, + variable: "one of the epoch data", + }); } } for val in self.epoch_registry { if !val.is_finite() { - return Err(AniseError::IntegrityError(IntegrityErrorKind::SubNormal)); + return Err(IntegrityError::SubNormal { + dataset: Self::DATASET_NAME, + variable: "one of the epoch registry data", + }); } } for val in self.state_data { if !val.is_finite() { - return Err(AniseError::IntegrityError(IntegrityErrorKind::SubNormal)); + return Err(IntegrityError::SubNormal { + dataset: Self::DATASET_NAME, + variable: "one of the state data", + }); } } Ok(()) } } + +#[cfg(test)] +mod hermite_ut { + use crate::{ + errors::{DecodingError, IntegrityError}, + naif::daf::NAIFDataSet, + }; + + use super::HermiteSetType13; + + #[test] + fn too_small() { + if HermiteSetType13::from_slice_f64(&[0.1, 0.2]) + != Err(DecodingError::TooFewDoubles { + dataset: "Hermite Type 13", + got: 2, + need: 3, + }) + { + panic!("test failure"); + } + } + + #[test] + fn invalid_data() { + // Two metadata, one state, one epoch + let zeros = [0.0_f64; 2 * 7 + 2]; + + let mut invalid_num_records = zeros.clone(); + invalid_num_records[zeros.len() - 1] = f64::INFINITY; + match HermiteSetType13::from_slice_f64(&invalid_num_records) { + Ok(_) => panic!("test failed on invalid num records"), + Err(e) => { + assert_eq!( + e, + DecodingError::Integrity { + source: IntegrityError::InvalidValue { + dataset: "Hermite Type 13", + variable: "number of records", + value: f64::INFINITY, + reason: "must be a finite value", + }, + } + ); + } + } + + let mut invalid_num_samples = zeros.clone(); + invalid_num_samples[zeros.len() - 2] = f64::INFINITY; + match HermiteSetType13::from_slice_f64(&invalid_num_samples) { + Ok(_) => panic!("test failed on invalid num samples"), + Err(e) => { + assert_eq!( + e, + DecodingError::Integrity { + source: IntegrityError::InvalidValue { + dataset: "Hermite Type 13", + variable: "number of interpolation samples", + value: f64::INFINITY, + reason: "must be a finite value", + }, + } + ); + } + } + + let mut invalid_epoch = zeros.clone(); + invalid_epoch[zeros.len() - 3] = f64::INFINITY; + + let dataset = HermiteSetType13::from_slice_f64(&invalid_epoch).unwrap(); + match dataset.check_integrity() { + Ok(_) => panic!("test failed on invalid interval_length"), + Err(e) => { + assert_eq!( + e, + IntegrityError::SubNormal { + dataset: "Hermite Type 13", + variable: "one of the epoch registry data", + }, + ); + } + } + + let mut invalid_record = zeros.clone(); + invalid_record[0] = f64::INFINITY; + // Force the number of records to be one, otherwise everything is considered the epoch registry + invalid_record[zeros.len() - 1] = 1.0; + + let dataset = 
HermiteSetType13::from_slice_f64(&invalid_record).unwrap(); + match dataset.check_integrity() { + Ok(_) => panic!("test failed on invalid interval_length"), + Err(e) => { + assert_eq!( + e, + IntegrityError::SubNormal { + dataset: "Hermite Type 13", + variable: "one of the state data", + }, + ); + } + } + } +} diff --git a/src/naif/spk/datatypes/lagrange.rs b/src/naif/spk/datatypes/lagrange.rs index 212d1d88..2190bd20 100644 --- a/src/naif/spk/datatypes/lagrange.rs +++ b/src/naif/spk/datatypes/lagrange.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -10,21 +10,21 @@ use core::fmt; use hifitime::{Duration, Epoch, TimeUnits}; -use log::error; +use snafu::ensure; use crate::{ - errors::IntegrityErrorKind, - math::{cartesian::CartesianState, Vector3}, + errors::{DecodingError, IntegrityError, TooFewDoublesSnafu}, + math::{cartesian::CartesianState, interpolation::InterpolationError, Vector3}, naif::{ daf::{NAIFDataRecord, NAIFDataSet, NAIFRecord}, spk::summary::SPKSummaryRecord, }, - prelude::AniseError, DBL_SIZE, }; use super::posvel::PositionVelocityRecord; +#[derive(PartialEq)] pub struct LagrangeSetType8<'a> { pub first_state_epoch: Epoch, pub step_size: Duration, @@ -51,25 +51,40 @@ impl<'a> NAIFDataSet<'a> for LagrangeSetType8<'a> { type SummaryKind = SPKSummaryRecord; type StateKind = CartesianState; type RecordKind = PositionVelocityRecord; + const DATASET_NAME: &'static str = "Lagrange Type 8"; + + fn from_slice_f64(slice: &'a [f64]) -> Result { + ensure!( + slice.len() >= 5, + TooFewDoublesSnafu { + dataset: Self::DATASET_NAME, + need: 5_usize, + got: slice.len() + } + ); - fn from_slice_f64(slice: &'a [f64]) -> Result { - if slice.len() < 5 { - error!( - "Cannot build a Type 8 Lagrange set from only {} items", - slice.len() - ); - return Err(AniseError::MalformedData(5)); - } // For this kind of record, the metadata is stored at the very end of the dataset, so we need to read that first. 
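For readers unfamiliar with snafu, here is a minimal, self-contained sketch of the `ensure!` plus context-selector pattern used by the new `from_slice_f64` implementations; the `DemoError` type and its `TooFewDoubles` variant are invented for the example and are not part of this change.

```rust
use snafu::{ensure, Snafu};

#[derive(Debug, Snafu)]
enum DemoError {
    #[snafu(display("{dataset} needs at least {need} doubles but got {got}"))]
    TooFewDoubles {
        dataset: &'static str,
        need: usize,
        got: usize,
    },
}

fn parse(slice: &[f64]) -> Result<(), DemoError> {
    // Expands to roughly:
    // `if !(slice.len() >= 3) { return TooFewDoublesSnafu { .. }.fail(); }`
    ensure!(
        slice.len() >= 3,
        TooFewDoublesSnafu {
            dataset: "demo set",
            need: 3_usize,
            got: slice.len()
        }
    );
    Ok(())
}

fn main() {
    assert!(parse(&[0.1, 0.2]).is_err());
    assert!(parse(&[0.1, 0.2, 0.3]).is_ok());
}
```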
let seconds_since_j2000 = slice[slice.len() - 4]; if !seconds_since_j2000.is_finite() { - return Err(AniseError::IntegrityError(IntegrityErrorKind::SubNormal)); + return Err(DecodingError::Integrity { + source: IntegrityError::SubNormal { + dataset: Self::DATASET_NAME, + variable: "seconds since J2000 ET", + }, + }); } + let first_state_epoch = Epoch::from_et_seconds(seconds_since_j2000); let step_size_s = slice[slice.len() - 3]; if !step_size_s.is_finite() { - return Err(AniseError::IntegrityError(IntegrityErrorKind::SubNormal)); + return Err(DecodingError::Integrity { + source: IntegrityError::SubNormal { + dataset: Self::DATASET_NAME, + variable: "step size in seconds", + }, + }); } + let step_size = step_size_s.seconds(); let degree = slice[slice.len() - 2] as usize; let num_records = slice[slice.len() - 1] as usize; @@ -83,12 +98,16 @@ impl<'a> NAIFDataSet<'a> for LagrangeSetType8<'a> { }) } - fn nth_record(&self, n: usize) -> Result { + fn nth_record(&self, n: usize) -> Result { let rcrd_len = self.record_data.len() / self.num_records; Ok(Self::RecordKind::from_slice_f64( self.record_data .get(n * rcrd_len..(n + 1) * rcrd_len) - .ok_or(AniseError::MalformedData((n + 1) * rcrd_len))?, + .ok_or(DecodingError::InaccessibleBytes { + start: n * rcrd_len, + end: (n + 1) * rcrd_len, + size: self.record_data.len(), + })?, )) } @@ -96,14 +115,17 @@ impl<'a> NAIFDataSet<'a> for LagrangeSetType8<'a> { &self, _epoch: Epoch, _: &Self::SummaryKind, - ) -> Result { + ) -> Result { todo!("https://github.com/anise-toolkit/anise.rs/issues/12") } - fn check_integrity(&self) -> Result<(), AniseError> { + fn check_integrity(&self) -> Result<(), IntegrityError> { for val in self.record_data { if !val.is_finite() { - return Err(AniseError::IntegrityError(IntegrityErrorKind::SubNormal)); + return Err(IntegrityError::SubNormal { + dataset: Self::DATASET_NAME, + variable: "one of the record data", + }); } } @@ -111,6 +133,7 @@ impl<'a> NAIFDataSet<'a> for LagrangeSetType8<'a> { } } +#[derive(PartialEq)] pub struct LagrangeSetType9<'a> { pub degree: usize, pub num_records: usize, @@ -137,15 +160,18 @@ impl<'a> NAIFDataSet<'a> for LagrangeSetType9<'a> { type SummaryKind = SPKSummaryRecord; type StateKind = (Vector3, Vector3); type RecordKind = PositionVelocityRecord; + const DATASET_NAME: &'static str = "Lagrange Type 9"; + + fn from_slice_f64(slice: &'a [f64]) -> Result { + ensure!( + slice.len() >= 3, + TooFewDoublesSnafu { + dataset: Self::DATASET_NAME, + need: 3_usize, + got: slice.len() + } + ); - fn from_slice_f64(slice: &'a [f64]) -> Result { - if slice.len() < 3 { - error!( - "Cannot build a Type 9 Lagrange set from only {} items", - slice.len() - ); - return Err(AniseError::MalformedData(5)); - } // For this kind of record, the metadata is stored at the very end of the dataset let num_records = slice[slice.len() - 1] as usize; let degree = slice[slice.len() - 2] as usize; @@ -166,12 +192,16 @@ impl<'a> NAIFDataSet<'a> for LagrangeSetType9<'a> { }) } - fn nth_record(&self, n: usize) -> Result { + fn nth_record(&self, n: usize) -> Result { let rcrd_len = self.state_data.len() / self.num_records; Ok(Self::RecordKind::from_slice_f64( self.state_data .get(n * rcrd_len..(n + 1) * rcrd_len) - .ok_or(AniseError::MalformedData((n + 1) * rcrd_len))?, + .ok_or(DecodingError::InaccessibleBytes { + start: n * rcrd_len, + end: (n + 1) * rcrd_len, + size: self.state_data.len(), + })?, )) } @@ -179,27 +209,36 @@ impl<'a> NAIFDataSet<'a> for LagrangeSetType9<'a> { &self, _epoch: Epoch, _: &Self::SummaryKind, - ) ->
Result { + ) -> Result { todo!("https://github.com/anise-toolkit/anise.rs/issues/13") } - fn check_integrity(&self) -> Result<(), AniseError> { + fn check_integrity(&self) -> Result<(), IntegrityError> { // Verify that none of the data is invalid once when we load it. for val in self.epoch_data { if !val.is_finite() { - return Err(AniseError::IntegrityError(IntegrityErrorKind::SubNormal)); + return Err(IntegrityError::SubNormal { + dataset: Self::DATASET_NAME, + variable: "one of the epoch data", + }); } } for val in self.epoch_registry { if !val.is_finite() { - return Err(AniseError::IntegrityError(IntegrityErrorKind::SubNormal)); + return Err(IntegrityError::SubNormal { + dataset: Self::DATASET_NAME, + variable: "one of the epoch registry data", + }); } } for val in self.state_data { if !val.is_finite() { - return Err(AniseError::IntegrityError(IntegrityErrorKind::SubNormal)); + return Err(IntegrityError::SubNormal { + dataset: Self::DATASET_NAME, + variable: "one of the state data", + }); } } diff --git a/src/naif/spk/datatypes/mod.rs b/src/naif/spk/datatypes/mod.rs index 7e51cf46..475dd513 100644 --- a/src/naif/spk/datatypes/mod.rs +++ b/src/naif/spk/datatypes/mod.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. diff --git a/src/naif/spk/datatypes/posvel.rs b/src/naif/spk/datatypes/posvel.rs index 632275fd..3ab0fafe 100644 --- a/src/naif/spk/datatypes/posvel.rs +++ b/src/naif/spk/datatypes/posvel.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. diff --git a/src/naif/spk/mod.rs b/src/naif/spk/mod.rs index c97688e7..48117ed2 100644 --- a/src/naif/spk/mod.rs +++ b/src/naif/spk/mod.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. diff --git a/src/naif/spk/summary.rs b/src/naif/spk/summary.rs index eebd27e9..2d19ec0d 100644 --- a/src/naif/spk/summary.rs +++ b/src/naif/spk/summary.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
@@ -10,15 +10,15 @@ use core::fmt; use hifitime::Epoch; -use log::{error, trace}; use zerocopy::{AsBytes, FromBytes, FromZeroes}; use crate::{ + ephemerides::EphemerisError, naif::daf::{NAIFRecord, NAIFSummaryRecord}, - prelude::AniseError, + prelude::{Frame, FrameUid}, }; -#[derive(Clone, Copy, Debug, Default, AsBytes, FromZeroes, FromBytes)] +#[derive(Clone, Copy, Debug, Default, AsBytes, FromZeroes, FromBytes, PartialEq)] #[repr(C)] pub struct SPKSummaryRecord { pub start_epoch_et_s: f64, @@ -32,13 +32,41 @@ pub struct SPKSummaryRecord { } impl<'a> SPKSummaryRecord { - pub fn spice_name(&self) -> Result<&'a str, AniseError> { - Self::id_to_human_name(self.target_id) + /// Returns the target frame UID of this summary + pub fn target_frame_uid(&self) -> FrameUid { + FrameUid { + ephemeris_id: self.target_id, + orientation_id: self.frame_id, + } + } + + /// Returns the center frame UID of this summary + pub fn center_frame_uid(&self) -> FrameUid { + FrameUid { + ephemeris_id: self.center_id, + orientation_id: self.frame_id, + } + } + + /// Returns the target frame UID of this summary + pub fn target_frame(&self) -> Frame { + Frame::from(self.target_frame_uid()) + } + + /// Returns the center frame UID of this summary + pub fn center_frame(&self) -> Frame { + Frame::from(self.center_frame_uid()) + } + + #[cfg(feature = "spkezr_validation")] + pub fn spice_name(&self) -> Result<&'a str, EphemerisError> { + Self::id_to_spice_name(self.target_id) } /// Converts the provided ID to its human name. /// Only works for the common celestial bodies - pub fn id_to_human_name(id: i32) -> Result<&'a str, AniseError> { + #[cfg(feature = "spkezr_validation")] + pub fn id_to_spice_name(id: i32) -> Result<&'a str, EphemerisError> { if id % 100 == 99 { // This is the planet itself match id / 100 { @@ -51,9 +79,7 @@ impl<'a> SPKSummaryRecord { 7 => Ok("Uranus"), 8 => Ok("Neptune"), 9 => Ok("Pluto"), - _ => Err(AniseError::DAFParserError(format!( - "Human name unknown for {id}" - ))), + _ => Err(EphemerisError::IdToName { id }), } } else if id == 301 { Ok("Moon") @@ -71,18 +97,17 @@ impl<'a> SPKSummaryRecord { 8 => Ok("Neptune Barycenter"), 9 => Ok("Pluto Barycenter"), 10 => Ok("Sun"), - _ => Err(AniseError::DAFParserError(format!( - "Human name unknown for barycenter {id}" - ))), + _ => Err(EphemerisError::IdToName { id }), } } else { - panic!("Human name unknown for {id}"); + Err(EphemerisError::IdToName { id }) } } /// Converts the provided ID to its human name. /// Only works for the common celestial bodies - pub fn human_name_to_id(name: &'a str) -> Result { + #[cfg(feature = "spkezr_validation")] + pub fn spice_name_to_id(name: &'a str) -> Result { match name { "Mercury" => Ok(1), "Venus" => Ok(2), @@ -102,30 +127,9 @@ impl<'a> SPKSummaryRecord { "Uranus Barycenter" => Ok(7), "Neptune Barycenter" => Ok(8), "Pluto Barycenter" => Ok(9), - _ => { - trace!("[human_name_to_id] unknown NAIF ID for `{name}`"); - Err(AniseError::ItemNotFound) - } - } - } - - /// Returns the human name of this segment if it can be guessed, else the standard name. - /// - /// # Returned value - /// 1. Typically, this will return the name of the celestial body - /// 2. The name is appended with "Barycenter" if the celestial object is know to have moons - /// - /// # Limitations - /// 0. In BSP files, the name is stored as a comment and is unstructured. So it's hard to copy those. (Help needed) - /// 1. One limitation of this approach is that given file may only contain one "Earth" - /// 2. 
Another limitation is that this code does not know all of the possible moons in the whole solar system. - pub fn human_name(&self) -> &'a str { - match Self::id_to_human_name(self.target_id) { - Ok(name) => name, - Err(e) => { - error!("{}", e); - panic!("Human name unknown for {self}") - } + _ => Err(EphemerisError::NameToId { + name: name.to_string(), + }), } } } @@ -133,6 +137,7 @@ impl<'a> SPKSummaryRecord { impl NAIFRecord for SPKSummaryRecord {} impl NAIFSummaryRecord for SPKSummaryRecord { + const NAME: &'static str = "SPKSummaryRecord"; fn start_index(&self) -> usize { self.start_idx as usize } diff --git a/src/orientations/mod.rs b/src/orientations/mod.rs new file mode 100644 index 00000000..ff1cae07 --- /dev/null +++ b/src/orientations/mod.rs @@ -0,0 +1,54 @@ +/* + * ANISE Toolkit + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. + * + * Documentation: https://nyxspace.com/ + */ + +use hifitime::Epoch; +use snafu::prelude::*; + +use crate::{ + errors::PhysicsError, math::interpolation::InterpolationError, naif::daf::DAFError, + prelude::FrameUid, +}; + +#[derive(Debug, Snafu, PartialEq)] +#[snafu(visibility(pub(crate)))] +pub enum OrientationError { + /// Somehow you've entered code that should not be reachable, please file a bug. + Unreachable, + #[snafu(display( + "could not load BPC because all {max_slots} are used (modify `MAX_LOADED_BPCS` at build time)" + ))] + StructureIsFull { max_slots: usize }, + #[snafu(display( + "Could not rotate from {from} to {to}: no common origin found at epoch {epoch}" + ))] + RotationOrigin { + from: FrameUid, + to: FrameUid, + epoch: Epoch, + }, + #[snafu(display("no orientation data loaded (must call load_bpc or DataSet::from_bytes)"))] + NoOrientationsLoaded, + #[snafu(display("when {action} caused {source}"))] + BPC { + action: &'static str, + #[snafu(backtrace)] + source: DAFError, + }, + #[snafu(display("during an orientation operation: {source}"))] + OrientationPhysics { + #[snafu(backtrace)] + source: PhysicsError, + }, + #[snafu(display("during an orientation interpolation: {source}"))] + OrientationInterpolation { + #[snafu(backtrace)] + source: InterpolationError, + }, +} diff --git a/src/spline.rs b/src/spline.rs deleted file mode 100644 index c5ca22d4..00000000 --- a/src/spline.rs +++ /dev/null @@ -1,167 +0,0 @@ -/* - * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) - * This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at https://mozilla.org/MPL/2.0/.
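The new `OrientationError` follows the snafu layering used throughout this change: a high-level variant carries an `action` string plus the lower-level error as its `source`. The standalone sketch below, with invented `LowLevelError` and `HighLevelError` types and without the backtrace plumbing, shows how `ResultExt::context` builds such a wrapped error.

```rust
use snafu::{ResultExt, Snafu};

#[derive(Debug, Snafu)]
#[snafu(display("low-level failure at byte {index}"))]
struct LowLevelError {
    index: usize,
}

#[derive(Debug, Snafu)]
enum HighLevelError {
    #[snafu(display("when {action}: {source}"))]
    Wrapped {
        action: &'static str,
        source: LowLevelError,
    },
}

fn low_level(fail: bool) -> Result<(), LowLevelError> {
    if fail {
        // `.fail()` builds the error from the generated context selector.
        LowLevelSnafu { index: 42_usize }.fail()
    } else {
        Ok(())
    }
}

fn high_level() -> Result<(), HighLevelError> {
    // `context` attaches the low-level error as the `source` of `Wrapped`.
    low_level(true).context(WrappedSnafu {
        action: "loading a BPC-like file",
    })
}

fn main() {
    println!("{}", high_level().unwrap_err());
}
```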
- * - * Documentation: https://nyxspace.com/ - */ - -use crc32fast::hash; -use log::error; - -use crate::{ - structure::{ - common::InterpolationKind, - spline::{Field, Splines}, - }, - errors::{AniseError, IntegrityErrorKind, InternalErrorKind}, - naif::dafold::Endian, - parse_bytes_as, DBL_SIZE, -}; - -impl<'a> Splines<'a> { - pub fn is_empty(&self) -> bool { - self.len() == 0 - } - - /// Returns the number of splines - pub fn len(&self) -> usize { - self.data.len() / self.metadata.len() + 1 - } - - pub fn fetch( - &self, - spline_idx: usize, - coeff_idx: usize, - field: Field, - ) -> Result { - // Compute the index in bytes at which the data starts - let offset = self.metadata.spline_offset(spline_idx) - + self.metadata.field_offset(field, coeff_idx)?; - - // Safely fetch this coefficient, returning an error if we're out of bounds. - match self.data.get(offset..offset + DBL_SIZE) { - Some(ptr) => Ok(parse_bytes_as!(f64, ptr, Endian::Big)), - None => { - error!( - "[fetch] could not fetch {}-th {:?} in spline {}", - coeff_idx, field, spline_idx - ); - Err(AniseError::MalformedData(offset + DBL_SIZE)) - } - } - } - - pub fn check_integrity(&self) -> Result<(), AniseError> { - // Ensure that the data is correctly decoded - let computed_chksum = hash(self.data); - if computed_chksum == self.data_checksum { - Ok(()) - } else { - error!( - "[integrity] expected hash {} but computed {}", - self.data_checksum, computed_chksum - ); - Err(AniseError::IntegrityError( - IntegrityErrorKind::ChecksumInvalid { - expected: self.data_checksum, - computed: computed_chksum, - }, - )) - } - } - - /// Evaluate this spline at the requested epoch and returns the position only. - pub fn position_at( - &self, - spline_idx: usize, - offset_s: f64, - window_length_s: f64, - kind: InterpolationKind, - ) -> Result<[f64; 3], AniseError> { - let orbit = self.posvel_at(spline_idx, offset_s, window_length_s, kind)?; - Ok([orbit[0], orbit[1], orbit[2]]) - } - - /// Evaluate this ephemeris at the requested epoch and returns the velocity only. - pub fn velocity_at( - &self, - spline_idx: usize, - offset_s: f64, - window_length_s: f64, - kind: InterpolationKind, - ) -> Result<[f64; 3], AniseError> { - let orbit = self.posvel_at(spline_idx, offset_s, window_length_s, kind)?; - Ok([orbit[3], orbit[4], orbit[5]]) - } - - /// Evaluate this ephemeris at the requested epoch and returns an orbit structure. 
- pub fn posvel_at( - &self, - _spline_idx: usize, - _offset_s: f64, - _window_length_s: f64, - kind: InterpolationKind, - ) -> Result<[f64; 6], AniseError> { - if kind != InterpolationKind::ChebyshevSeries { - return Err(InternalErrorKind::InterpolationNotSupported.into()); - } - - todo!() - - // let mut interp_t = [0.0_f64; MAX_DEGREE]; - // let mut interp_dt = [0.0_f64; MAX_DEGREE]; - - // let t1 = 2.0 * offset_s / window_length_s - 1.0; - // interp_t[0] = 1.0; - // interp_t[1] = t1; - // for i in 2..usize::from(self.config.degree) { - // interp_t[i] = (2.0 * t1) * interp_t[i - 1] - interp_t[i - 2]; - // } - - // interp_dt[0] = 0.0; - // interp_dt[1] = 1.0; - // interp_dt[2] = 2.0 * (2.0 * t1); - // for i in 3..usize::from(self.config.degree) { - // interp_dt[i] = (2.0 * t1) * interp_dt[i - 1] - interp_dt[i - 2] - // + interp_t[i - 1] - // + interp_t[i - 1]; - // } - // for interp_i in &mut interp_dt { - // *interp_i *= 2.0 / window_length_s; - // } - - // let mut x = 0.0; - // let mut y = 0.0; - // let mut z = 0.0; - // let mut vx = 0.0; - // let mut vy = 0.0; - // let mut vz = 0.0; - - // for (idx, pos_factor) in interp_t.iter().enumerate() { - // let vel_factor = interp_dt[idx]; - // if self.config.num_position_coeffs > 0 { - // x += pos_factor * self.fetch(spline_idx, idx, Coefficient::X)?; - // } - // if self.config.num_position_coeffs > 1 { - // y += pos_factor * self.fetch(spline_idx, idx, Coefficient::Y)?; - // } - // if self.config.num_position_coeffs > 2 { - // z += pos_factor * self.fetch(spline_idx, idx, Coefficient::Z)?; - // } - // if self.config.num_velocity_coeffs > 0 { - // vx += vel_factor * self.fetch(spline_idx, idx, Coefficient::VX)?; - // } - // if self.config.num_velocity_coeffs > 1 { - // vy += vel_factor * self.fetch(spline_idx, idx, Coefficient::VY)?; - // } - // if self.config.num_velocity_coeffs > 2 { - // vz += vel_factor * self.fetch(spline_idx, idx, Coefficient::VZ)?; - // } - // } - - // Ok([x, y, z, vx, vy, vz]) - } -} diff --git a/src/structure/dataset.rs b/src/structure/dataset.rs index 2c673a65..c9c0992c 100644 --- a/src/structure/dataset.rs +++ b/src/structure/dataset.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -8,22 +8,27 @@ * Documentation: https://nyxspace.com/ */ use super::{ - lookuptable::{Entry, LookUpTable}, + lookuptable::{Entry, LookUpTable, LutError}, metadata::Metadata, semver::Semver, ANISE_VERSION, }; -use crate::{errors::IntegrityErrorKind, prelude::AniseError, NaifId}; +use crate::{ + errors::{DecodingError, IntegrityError}, + NaifId, +}; +use bytes::Bytes; use core::fmt; use core::marker::PhantomData; +use core::ops::Deref; use der::{asn1::OctetStringRef, Decode, Encode, Reader, Writer}; use log::{error, trace}; -use std::ops::Deref; +use snafu::prelude::*; macro_rules! io_imports { () => { use std::fs::File; - use std::io::Write; + use std::io::{Error as IOError, ErrorKind as IOErrorKind, Write}; use std::path::Path; use std::path::PathBuf; }; @@ -31,6 +36,79 @@ macro_rules! 
io_imports { io_imports!(); +#[derive(Debug, Snafu)] +#[snafu(visibility(pub(crate)))] +pub enum DataSetError { + #[snafu(display("when {action} {source}"))] + DataSetLut { + action: &'static str, + source: LutError, + }, + #[snafu(display("when {action} {source}"))] + DataSetIntegrity { + action: &'static str, + source: IntegrityError, + }, + #[snafu(display("when {action} {source}"))] + DataDecoding { + action: &'static str, + source: DecodingError, + }, + #[snafu(display("input/output error while {action}"))] + IO { + action: &'static str, + source: IOError, + }, +} + +impl PartialEq for DataSetError { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + ( + Self::DataSetLut { + action: l_action, + source: l_source, + }, + Self::DataSetLut { + action: r_action, + source: r_source, + }, + ) => l_action == r_action && l_source == r_source, + ( + Self::DataSetIntegrity { + action: l_action, + source: l_source, + }, + Self::DataSetIntegrity { + action: r_action, + source: r_source, + }, + ) => l_action == r_action && l_source == r_source, + ( + Self::DataDecoding { + action: l_action, + source: l_source, + }, + Self::DataDecoding { + action: r_action, + source: r_source, + }, + ) => l_action == r_action && l_source == r_source, + ( + Self::IO { + action: l_action, + source: _l_source, + }, + Self::IO { + action: r_action, + source: _r_source, + }, + ) => l_action == r_action, + _ => false, + } + } +} + #[derive(Clone, Copy, PartialEq, Eq, Debug)] #[repr(u8)] pub enum DataSetType { @@ -75,7 +153,9 @@ impl<'a> Decode<'a> for DataSetType { } /// The kind of data that can be encoded in a dataset -pub trait DataSetT<'a>: Encode + Decode<'a> {} +pub trait DataSetT<'a>: Encode + Decode<'a> { + const NAME: &'static str; +} /// A DataSet is the core structure shared by all ANISE binary data. #[derive(Clone, Default, PartialEq, Eq, Debug)] @@ -85,7 +165,7 @@ pub struct DataSet<'a, T: DataSetT<'a>, const ENTRIES: usize> { pub lut: LookUpTable<'a, ENTRIES>, pub data_checksum: u32, /// The actual data from the dataset - pub bytes: &'a [u8], + pub bytes: Bytes, _daf_type: PhantomData, } @@ -102,7 +182,7 @@ impl<'a, T: DataSetT<'a>, const ENTRIES: usize> DataSetBuilder<'a, T, ENTRIES> { data: T, id: Option, name: Option<&'a str>, - ) -> Result<(), AniseError> { + ) -> Result<(), DataSetError> { let mut this_buf = vec![]; data.encode_to_vec(&mut this_buf).unwrap(); // Build this entry data. 
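The hand-written `PartialEq` above exists because `std::io::Error` does not implement `PartialEq`, so the derive cannot be used once the `IO` variant carries one. A minimal sketch of the same workaround, with an invented `DemoError` type that compares only the non-source fields:

```rust
use std::io::Error as IOError;

// `std::io::Error` does not implement `PartialEq`, so an enum holding one
// cannot simply `#[derive(PartialEq)]`. The workaround mirrored above is a
// manual impl that ignores the non-comparable source.
#[derive(Debug)]
enum DemoError {
    Parse { action: &'static str },
    Io { action: &'static str, source: IOError },
}

impl PartialEq for DemoError {
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            (Self::Parse { action: l }, Self::Parse { action: r }) => l == r,
            // Compare only the action; the underlying io::Error is ignored.
            (Self::Io { action: l, .. }, Self::Io { action: r, .. }) => l == r,
            _ => false,
        }
    }
}

fn main() {
    let a = DemoError::Parse { action: "decoding" };
    let b = DemoError::Parse { action: "decoding" };
    assert_eq!(a, b);

    let io = DemoError::Io {
        action: "writing",
        source: IOError::new(std::io::ErrorKind::Other, "disk full"),
    };
    assert_ne!(io, a);
}
```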
@@ -112,21 +192,39 @@ impl<'a, T: DataSetT<'a>, const ENTRIES: usize> DataSetBuilder<'a, T, ENTRIES> { }; if id.is_some() && name.is_some() { - self.dataset.lut.append(id.unwrap(), name.unwrap(), entry)?; + self.dataset + .lut + .append(id.unwrap(), name.unwrap(), entry) + .with_context(|_| DataSetLutSnafu { + action: "pushing data with ID and name", + })?; } else if id.is_some() { - self.dataset.lut.append_id(id.unwrap(), entry)?; + self.dataset + .lut + .append_id(id.unwrap(), entry) + .with_context(|_| DataSetLutSnafu { + action: "pushing data with ID only", + })?; } else if name.is_some() { - self.dataset.lut.append_name(name.unwrap(), entry)?; + self.dataset + .lut + .append_name(name.unwrap(), entry) + .with_context(|_| DataSetLutSnafu { + action: "pushing data with name only", + })?; } else { - return Err(AniseError::ItemNotFound); + return Err(DataSetError::DataSetLut { + action: "pushing data", + source: LutError::NoKeyProvided, + }); } buf.extend_from_slice(&this_buf); Ok(()) } - pub fn finalize(mut self, buf: &'a [u8]) -> Result, AniseError> { - self.dataset.bytes = buf; + pub fn finalize(mut self, buf: Vec) -> Result, DataSetError> { + self.dataset.bytes = Bytes::copy_from_slice(&buf); self.dataset.set_crc32(); Ok(self.dataset) } @@ -134,40 +232,52 @@ impl<'a, T: DataSetT<'a>, const ENTRIES: usize> DataSetBuilder<'a, T, ENTRIES> { impl<'a, T: DataSetT<'a>, const ENTRIES: usize> DataSet<'a, T, ENTRIES> { /// Try to load an Anise file from a pointer of bytes - pub fn try_from_bytes(bytes: &'a [u8]) -> Result { + pub fn try_from_bytes>(bytes: &'a B) -> Result { match Self::from_der(bytes) { Ok(ctx) => { trace!("[try_from_bytes] loaded context successfully"); // Check the full integrity on load of the file. - ctx.check_integrity()?; + ctx.check_integrity() + .with_context(|_| DataSetIntegritySnafu { + action: "loading data set from bytes", + })?; Ok(ctx) } - Err(e) => { + Err(_) => { // If we can't load the file, let's try to load the version only to be helpful - match bytes.get(0..5) { - Some(semver_bytes) => match Semver::from_der(semver_bytes) { - Ok(file_version) => { - if file_version == ANISE_VERSION { - error!("[try_from_bytes] context bytes corrupted but ANISE library version match"); - Err(AniseError::DecodingError(e)) - } else { - error!( - "[try_from_bytes] context bytes and ANISE library version mismatch" - ); - Err(AniseError::IncompatibleVersion { + let semver_bytes = bytes + .get(0..5) + .ok_or(DecodingError::InaccessibleBytes { + start: 0, + end: 5, + size: bytes.len(), + }) + .with_context(|_| DataDecodingSnafu { + action: "checking data set version", + })?; + match Semver::from_der(semver_bytes) { + Ok(file_version) => { + if file_version == ANISE_VERSION { + Err(DataSetError::DataDecoding { + action: "loading from bytes", + source: DecodingError::Obscure { kind: T::NAME }, + }) + } else { + Err(DataSetError::DataDecoding { + action: "checking data set version", + source: DecodingError::AniseVersion { got: file_version, exp: ANISE_VERSION, - }) - } - } - Err(e) => { - error!("[try_from_bytes] context bytes not in ANISE format"); - Err(AniseError::DecodingError(e)) + }, + }) } - }, - None => { - error!("[try_from_bytes] context bytes way too short (less than 5 bytes)"); - Err(AniseError::DecodingError(e)) + } + Err(err) => { + error!("context bytes not in ANISE format"); + Err(DataSetError::DataDecoding { + action: "loading SemVer", + source: DecodingError::DecodingDer { err }, + }) } } } @@ -182,7 +292,7 @@ impl<'a, T: DataSetT<'a>, const ENTRIES: usize> 
DataSet<'a, T, ENTRIES> { /// Compute the CRC32 of the underlying bytes pub fn crc32(&self) -> u32 { - crc32fast::hash(self.bytes) + crc32fast::hash(&self.bytes) } /// Sets the checksum of this data. @@ -190,9 +300,9 @@ impl<'a, T: DataSetT<'a>, const ENTRIES: usize> DataSet<'a, T, ENTRIES> { self.data_checksum = self.crc32(); } - pub fn check_integrity(&self) -> Result<(), AniseError> { + pub fn check_integrity(&self) -> Result<(), IntegrityError> { // Ensure that the data is correctly decoded - let computed_chksum = crc32fast::hash(self.bytes); + let computed_chksum = crc32fast::hash(&self.bytes); if computed_chksum == self.data_checksum { Ok(()) } else { @@ -200,68 +310,89 @@ impl<'a, T: DataSetT<'a>, const ENTRIES: usize> DataSet<'a, T, ENTRIES> { "[integrity] expected hash {} but computed {}", self.data_checksum, computed_chksum ); - Err(AniseError::IntegrityError( - IntegrityErrorKind::ChecksumInvalid { - expected: self.data_checksum, - computed: computed_chksum, - }, - )) + Err(IntegrityError::ChecksumInvalid { + expected: self.data_checksum, + computed: computed_chksum, + }) } } /// Scrubs the data by computing the CRC32 of the bytes and making sure that it still matches the previously known hash - pub fn scrub(&self) -> Result<(), AniseError> { + pub fn scrub(&self) -> Result<(), IntegrityError> { if self.crc32() == self.data_checksum { Ok(()) } else { // Compiler will optimize the double computation away - Err(AniseError::IntegrityError( - IntegrityErrorKind::ChecksumInvalid { - expected: self.data_checksum, - computed: self.crc32(), - }, - )) + Err(IntegrityError::ChecksumInvalid { + expected: self.data_checksum, + computed: self.crc32(), + }) } } - pub fn get_by_id(&self, id: NaifId) -> Result { + pub fn get_by_id(&'a self, id: NaifId) -> Result { if let Some(entry) = self.lut.by_id.get(&id) { // Found the ID - match T::from_der(&self.bytes[entry.as_range()]) { - Ok(data) => Ok(data), - Err(e) => { - println!("{e:?}"); - dbg!(&self.bytes[entry.as_range()]); - Err(AniseError::MalformedData(entry.start_idx as usize)) - } - } + let bytes = self + .bytes + .get(entry.as_range()) + .ok_or_else(|| entry.decoding_error()) + .with_context(|_| DataDecodingSnafu { + action: "fetching by ID", + })?; + T::from_der(bytes) + .map_err(|err| DecodingError::DecodingDer { err }) + .with_context(|_| DataDecodingSnafu { + action: "fetching by ID", + }) } else { - Err(AniseError::ItemNotFound) + Err(DataSetError::DataSetLut { + action: "fetching by ID", + source: LutError::UnknownId { id }, + }) } } - pub fn get_by_name(&self, id: NaifId) -> Result { - if let Some(entry) = self.lut.by_id.get(&id) { - // Found the ID - if let Ok(data) = T::from_der(&self.bytes[entry.as_range()]) { - Ok(data) - } else { - Err(AniseError::MalformedData(entry.start_idx as usize)) - } + pub fn get_by_name(&'a self, name: &str) -> Result { + if let Some(entry) = self.lut.by_name.get(&name) { + // Found the name + let bytes = self + .bytes + .get(entry.as_range()) + .ok_or_else(|| entry.decoding_error()) + .with_context(|_| DataDecodingSnafu { + action: "fetching by name", + })?; + T::from_der(bytes) + .map_err(|err| DecodingError::DecodingDer { err }) + .with_context(|_| DataDecodingSnafu { + action: "fetching by name", + }) } else { - Err(AniseError::ItemNotFound) + Err(DataSetError::DataSetLut { + action: "fetching by ID", + source: LutError::UnknownName { + name: name.to_string(), + }, + }) } } /// Saves this dataset to the provided file /// If overwrite is set to false, and the filename already exists, this function 
will return an error. - pub fn save_as(&self, filename: PathBuf, overwrite: bool) -> Result<(), AniseError> { + pub fn save_as(&self, filename: PathBuf, overwrite: bool) -> Result<(), DataSetError> { use log::{info, warn}; if Path::new(&filename).exists() { if !overwrite { - return Err(AniseError::FileExists); + return Err(DataSetError::IO { + source: IOError::new( + IOErrorKind::AlreadyExists, + "file exists and overwrite flag set to false", + ), + action: "creating data set file", + }); } else { warn!("[save_as] overwriting {}", filename.display()); } @@ -271,24 +402,33 @@ impl<'a, T: DataSetT<'a>, const ENTRIES: usize> DataSet<'a, T, ENTRIES> { match File::create(&filename) { Ok(mut file) => { - if let Err(e) = self.encode_to_vec(&mut buf) { - return Err(AniseError::DecodingError(e)); + if let Err(err) = self.encode_to_vec(&mut buf) { + return Err(DataSetError::DataDecoding { + action: "encoding data set", + source: DecodingError::DecodingDer { err }, + }); } - if let Err(e) = file.write_all(&buf) { - Err(e.kind().into()) + if let Err(source) = file.write_all(&buf) { + Err(DataSetError::IO { + source, + action: "writing data set to file", + }) } else { info!("[OK] dataset saved to {}", filename.display()); Ok(()) } } - Err(e) => Err(e.kind().into()), + Err(source) => Err(DataSetError::IO { + source, + action: "creating data set file", + }), } } } impl<'a, T: DataSetT<'a>, const ENTRIES: usize> Encode for DataSet<'a, T, ENTRIES> { fn encoded_len(&self) -> der::Result { - let as_byte_ref = OctetStringRef::new(self.bytes)?; + let as_byte_ref = OctetStringRef::new(&self.bytes)?; self.metadata.encoded_len()? + self.lut.encoded_len()? + self.data_checksum.encoded_len()? @@ -296,7 +436,7 @@ impl<'a, T: DataSetT<'a>, const ENTRIES: usize> Encode for DataSet<'a, T, ENTRIE } fn encode(&self, encoder: &mut impl Writer) -> der::Result<()> { - let as_byte_ref = OctetStringRef::new(self.bytes)?; + let as_byte_ref = OctetStringRef::new(&self.bytes)?; self.metadata.encode(encoder)?; self.lut.encode(encoder)?; self.data_checksum.encode(encoder)?; @@ -314,7 +454,7 @@ impl<'a, T: DataSetT<'a>, const ENTRIES: usize> Decode<'a> for DataSet<'a, T, EN metadata, lut, data_checksum: crc32_checksum, - bytes: bytes.as_bytes(), + bytes: Bytes::copy_from_slice(bytes.as_bytes()), _daf_type: PhantomData::, }) } @@ -338,7 +478,9 @@ mod dataset_ut { dataset::DataSetBuilder, lookuptable::Entry, spacecraft::{DragData, Inertia, Mass, SRPData, SpacecraftData}, + SpacecraftDataSet, }; + use bytes::Bytes; use super::{DataSet, Decode, Encode, LookUpTable}; @@ -426,17 +568,17 @@ mod dataset_ut { lut.append(-20, "SRP spacecraft", srp_sc_entry).unwrap(); lut.append(-50, "Full spacecraft", full_sc_entry).unwrap(); // Build the dataset - let mut dataset = DataSet::default(); - dataset.lut = lut; - dataset.bytes = &packed_buf; + let mut dataset = DataSet { + lut, + bytes: Bytes::copy_from_slice(&packed_buf), + ..Default::default() + }; dataset.set_crc32(); // And encode it. let mut buf = vec![]; dataset.encode_to_vec(&mut buf).unwrap(); - dbg!(buf.len()); - let repr_dec = DataSet::::from_der(&buf).unwrap(); assert_eq!(dataset, repr_dec); @@ -505,7 +647,7 @@ mod dataset_ut { .push_into(&mut buf, srp_sc, None, Some("ID less SRP spacecraft")) .unwrap(); - let dataset = builder.finalize(&buf).unwrap(); + let dataset = builder.finalize(buf).unwrap(); // And encode it. 
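The `set_crc32` / `check_integrity` pair relies on a store-then-verify CRC32 pattern. Here is a minimal standalone sketch of that pattern; the `Blob` type is invented for illustration, while `crc32fast::hash` is the same function used above.

```rust
use crc32fast::hash;

// The checksum is computed when the data is finalized and re-computed on
// load to detect corruption.
struct Blob {
    bytes: Vec<u8>,
    checksum: u32,
}

impl Blob {
    fn new(bytes: Vec<u8>) -> Self {
        let checksum = hash(&bytes);
        Self { bytes, checksum }
    }

    fn check_integrity(&self) -> Result<(), (u32, u32)> {
        let computed = hash(&self.bytes);
        if computed == self.checksum {
            Ok(())
        } else {
            // (expected, computed), mirroring the ChecksumInvalid fields above.
            Err((self.checksum, computed))
        }
    }
}

fn main() {
    let mut blob = Blob::new(vec![1, 2, 3, 4]);
    assert!(blob.check_integrity().is_ok());
    blob.bytes[0] = 0; // simulate corruption
    assert!(blob.check_integrity().is_err());
}
```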
@@ -514,7 +656,7 @@ mod dataset_ut { dbg!(ebuf.len()); - let repr_dec = DataSet::::from_bytes(&ebuf); + let repr_dec = SpacecraftDataSet::from_bytes(&ebuf); assert_eq!(dataset, repr_dec); @@ -529,6 +671,6 @@ mod dataset_ut { assert_eq!(srp_repr, srp_sc); // And check that we get an error if the data is wrong. - assert!(repr_dec.get_by_id(0).is_err()) + assert!(repr_dec.get_by_id(0).is_err()); } } diff --git a/src/structure/lookuptable.rs b/src/structure/lookuptable.rs index 952211b1..24d22657 100644 --- a/src/structure/lookuptable.rs +++ b/src/structure/lookuptable.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -13,8 +13,28 @@ use der::{ }; use heapless::FnvIndexMap; use log::warn; - -use crate::{prelude::AniseError, NaifId}; +use snafu::prelude::*; + +use crate::{errors::DecodingError, NaifId}; + +#[derive(Debug, Snafu, PartialEq)] +#[snafu(visibility(pub(crate)))] +pub enum LutError { + #[snafu(display( + "ID LUT is full with all {max_slots} taken (increase ENTRIES at build time)" + ))] + IdLutFull { max_slots: usize }, + #[snafu(display( + "Names LUT is full with all {max_slots} taken (increase ENTRIES at build time)" + ))] + NameLutFull { max_slots: usize }, + #[snafu(display("must provide either an ID or a name for a look up, but provided neither"))] + NoKeyProvided, + #[snafu(display("ID {id} not in look up table"))] + UnknownId { id: NaifId }, + #[snafu(display("name {name} not in look up table"))] + UnknownName { name: String }, +} /// A lookup table entry contains the start and end indexes in the data array of the data that is sought after.
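The lookup table that reports these `LutError`s is backed by fixed-capacity `heapless` maps, so an insert into a full map is what becomes a "LUT full" error. A standalone sketch of that pattern follows; the `DemoLutError` type and the tiny capacity are invented, and the heapless 0.7-style `FnvIndexMap` API is assumed here.

```rust
use heapless::FnvIndexMap;

#[derive(Debug, PartialEq)]
enum DemoLutError {
    IdLutFull { max_slots: usize },
}

// Capacity is a compile-time constant (a power of two for FnvIndexMap),
// analogous to the ENTRIES parameter of the lookup table above.
const ENTRIES: usize = 4;

fn append_id(
    lut: &mut FnvIndexMap<i32, u32, ENTRIES>,
    id: i32,
    start_idx: u32,
) -> Result<(), DemoLutError> {
    // `insert` fails with the rejected pair when the fixed-capacity map is full.
    lut.insert(id, start_idx)
        .map_err(|_| DemoLutError::IdLutFull { max_slots: ENTRIES })?;
    Ok(())
}

fn main() {
    let mut lut = FnvIndexMap::new();
    for id in 0..ENTRIES as i32 {
        append_id(&mut lut, -20 - id, id as u32 * 100).unwrap();
    }
    // The fifth insert cannot fit and is reported as a full LUT.
    assert_eq!(
        append_id(&mut lut, -99, 0),
        Err(DemoLutError::IdLutFull { max_slots: ENTRIES })
    );
}
```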
/// @@ -31,6 +51,14 @@ impl Entry { pub(crate) fn as_range(&self) -> core::ops::Range { self.start_idx as usize..self.end_idx as usize } + /// Returns a pre-populated decoding error + pub(crate) fn decoding_error(&self) -> DecodingError { + DecodingError::InaccessibleBytes { + start: self.start_idx as usize, + end: self.end_idx as usize, + size: (self.end_idx - self.start_idx) as usize, + } + } } impl Encode for Entry { @@ -66,27 +94,27 @@ pub struct LookUpTable<'a, const ENTRIES: usize> { } impl<'a, const ENTRIES: usize> LookUpTable<'a, ENTRIES> { - pub fn append(&mut self, id: i32, name: &'a str, entry: Entry) -> Result<(), AniseError> { + pub fn append(&mut self, id: i32, name: &'a str, entry: Entry) -> Result<(), LutError> { self.by_id .insert(id, entry) - .map_err(|_| AniseError::StructureIsFull)?; + .map_err(|_| LutError::IdLutFull { max_slots: ENTRIES })?; self.by_name .insert(name, entry) - .map_err(|_| AniseError::StructureIsFull)?; + .map_err(|_| LutError::NameLutFull { max_slots: ENTRIES })?; Ok(()) } - pub fn append_id(&mut self, id: i32, entry: Entry) -> Result<(), AniseError> { + pub fn append_id(&mut self, id: i32, entry: Entry) -> Result<(), LutError> { self.by_id .insert(id, entry) - .map_err(|_| AniseError::StructureIsFull)?; + .map_err(|_| LutError::IdLutFull { max_slots: ENTRIES })?; Ok(()) } - pub fn append_name(&mut self, name: &'a str, entry: Entry) -> Result<(), AniseError> { + pub fn append_name(&mut self, name: &'a str, entry: Entry) -> Result<(), LutError> { self.by_name .insert(name, entry) - .map_err(|_| AniseError::StructureIsFull)?; + .map_err(|_| LutError::NameLutFull { max_slots: ENTRIES })?; Ok(()) } @@ -184,7 +212,6 @@ impl<'a, const ENTRIES: usize> Decode<'a> for LookUpTable<'a, ENTRIES> { } if !lut.check_integrity() { - // TODO: Change this to print the error but don't prevent loading the data. warn!( "decoded lookup table is not integral: {} names but {} ids", lut.by_name.len(), @@ -218,7 +245,7 @@ mod lut_ut { let mut repr = LookUpTable::<32>::default(); let num_bytes = 363; for i in 0..32 { - let id = -20 - (i as i32); + let id = -20 - i; repr.append_id( id, Entry { @@ -250,9 +277,9 @@ mod lut_ut { names.push(format!("Name{}", i)); } - for i in 0..LUT_SIZE { + for (i, name) in names.iter().enumerate().take(LUT_SIZE) { repr.append_name( - &names[i], + name, Entry { start_idx: (i * num_bytes) as u32, end_idx: ((i + 1) * num_bytes) as u32, diff --git a/src/structure/metadata.rs b/src/structure/metadata.rs index 153c21e5..a65bca48 100644 --- a/src/structure/metadata.rs +++ b/src/structure/metadata.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
@@ -12,7 +12,7 @@ use core::str::FromStr; use der::{asn1::Utf8StringRef, Decode, Encode, Reader, Writer}; use hifitime::Epoch; -use crate::prelude::AniseError; +use crate::errors::DecodingError; use super::{dataset::DataSetType, semver::Semver, ANISE_VERSION}; @@ -32,10 +32,22 @@ pub struct Metadata<'a> { impl<'a> Metadata<'a> { /// Only decode the anise version and dataset type - pub fn decode_header(bytes: &[u8]) -> Result { - let anise_version = Semver::from_der(&bytes[..5]).map_err(AniseError::DecodingError)?; - let dataset_type = - DataSetType::from_der(&bytes[5..8]).map_err(AniseError::DecodingError)?; + pub fn decode_header(bytes: &[u8]) -> Result { + let anise_version = + Semver::from_der(bytes.get(..5).ok_or(DecodingError::InaccessibleBytes { + start: 0, + end: 5, + size: bytes.len(), + })?) + .map_err(|err| DecodingError::DecodingDer { err })?; + let dataset_type = DataSetType::from_der(bytes.get(5..8).ok_or({ + DecodingError::InaccessibleBytes { + start: 5, + end: 8, + size: bytes.len(), + } + })?) + .map_err(|err| DecodingError::DecodingDer { err })?; let me = Self { anise_version, dataset_type, @@ -90,7 +102,7 @@ impl<'a> Decode<'a> for Metadata<'a> { impl<'a> fmt::Display for Metadata<'a> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - writeln!(f, "ANISE version {}", self.anise_version)?; + write!(f, "ANISE version {}", self.anise_version)?; writeln!( f, "Originator: {}", @@ -112,3 +124,54 @@ impl<'a> fmt::Display for Metadata<'a> { ) } } + +#[cfg(test)] +mod metadata_ut { + use super::Metadata; + use der::{Decode, Encode}; + + #[test] + fn meta_encdec_min_repr() { + // A minimal representation of a planetary constant. + let repr = Metadata::default(); + + let mut buf = vec![]; + repr.encode_to_vec(&mut buf).unwrap(); + + let repr_dec = Metadata::from_der(&buf).unwrap(); + + assert_eq!(repr, repr_dec); + + assert_eq!( + format!("{repr}"), + format!( + r#"ANISE version ANISE version 0.0.1 +Originator: (not set) +Creation date: {} +Metadata URI: (not set) +"#, + repr_dec.creation_date + ) + ); + } + + #[test] + fn meta_invalid() { + let repr = Metadata::default(); + + let mut buf = vec![]; + repr.encode_to_vec(&mut buf).unwrap(); + + // Check that we can decode the header only + assert!(Metadata::decode_header(&buf).is_ok()); + // Check that reducing the number of bytes prevents decoding the header + assert!( + Metadata::decode_header(&buf[..7]).is_err(), + "should not have enough for dataset" + ); + assert!( + Metadata::decode_header(&buf[..4]).is_err(), + "should not have enough for version" + ); + } +} diff --git a/src/structure/mod.rs b/src/structure/mod.rs index 5e5010df..7519a5bd 100644 --- a/src/structure/mod.rs +++ b/src/structure/mod.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
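`decode_header` above prefers `bytes.get(..n)` over direct indexing so that a truncated buffer yields an `InaccessibleBytes`-style error instead of a panic. A minimal sketch of that pattern with an invented error type:

```rust
// Prefer `bytes.get(range)` over `&bytes[range]`: a short buffer returns an
// error rather than panicking. The error type here is local to the example.
#[derive(Debug, PartialEq)]
enum DemoDecodingError {
    InaccessibleBytes { start: usize, end: usize, size: usize },
}

fn version_bytes(bytes: &[u8]) -> Result<&[u8], DemoDecodingError> {
    bytes.get(..5).ok_or(DemoDecodingError::InaccessibleBytes {
        start: 0,
        end: 5,
        size: bytes.len(),
    })
}

fn main() {
    assert!(version_bytes(&[1, 2, 3, 4, 5, 6]).is_ok());
    assert_eq!(
        version_bytes(&[1, 2]),
        Err(DemoDecodingError::InaccessibleBytes { start: 0, end: 5, size: 2 })
    );
}
```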
@@ -19,7 +19,11 @@ pub mod planetocentric; pub mod semver; pub mod spacecraft; -use self::semver::Semver; +use self::{ + dataset::DataSet, planetocentric::PlanetaryData, semver::Semver, spacecraft::SpacecraftData, +}; +use crate::almanac::{MAX_PLANETARY_DATA, MAX_SPACECRAFT_DATA}; + /// The current version of ANISE pub const ANISE_VERSION: Semver = Semver { major: 0, @@ -27,5 +31,5 @@ pub const ANISE_VERSION: Semver = Semver { patch: 1, }; -/// The maximum number of trajectories that can be loaded in a single context -pub const MAX_TRAJECTORIES: usize = 31; +pub type SpacecraftDataSet<'a> = DataSet<'a, SpacecraftData<'a>, MAX_SPACECRAFT_DATA>; +pub type PlanetaryDataSet<'a> = DataSet<'a, PlanetaryData, MAX_PLANETARY_DATA>; diff --git a/src/structure/planetocentric/ellipsoid.rs b/src/structure/planetocentric/ellipsoid.rs index 47facf1e..402ca633 100644 --- a/src/structure/planetocentric/ellipsoid.rs +++ b/src/structure/planetocentric/ellipsoid.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. diff --git a/src/structure/planetocentric/mod.rs b/src/structure/planetocentric/mod.rs index ebcf15bd..322fc7d9 100644 --- a/src/structure/planetocentric/mod.rs +++ b/src/structure/planetocentric/mod.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -8,7 +8,10 @@ * Documentation: https://nyxspace.com/ */ -use crate::NaifId; +use crate::{ + prelude::{Frame, FrameUid}, + NaifId, +}; pub mod ellipsoid; pub mod nutprec; pub mod phaseangle; @@ -42,9 +45,20 @@ pub struct PlanetaryData { pub nut_prec_angles: [NutationPrecessionAngle; MAX_NUT_PREC_ANGLES], } -impl<'a> DataSetT<'a> for PlanetaryData {} +impl<'a> DataSetT<'a> for PlanetaryData { + const NAME: &'static str = "planetary data"; +} impl PlanetaryData { + /// Converts this planetary data into a Frame + pub fn to_frame(&self, uid: FrameUid) -> Frame { + Frame { + ephemeris_id: uid.ephemeris_id, + orientation_id: uid.orientation_id, + mu_km3_s2: Some(self.mu_km3_s2), + shape: self.shape, + } + } /// Specifies what data is available in this structure. /// /// Returns: @@ -328,7 +342,7 @@ mod planetary_constants_ut { ]); assert_eq!(prime_m.as_ref().unwrap().coeffs_count, 13); - let gm_moon = 4.9028000661637961E+03; + let gm_moon = 4.902_800_066_163_796E3; let moon = PlanetaryData { object_id: 301, diff --git a/src/structure/planetocentric/nutprec.rs b/src/structure/planetocentric/nutprec.rs index 34c65ce5..cae93745 100644 --- a/src/structure/planetocentric/nutprec.rs +++ b/src/structure/planetocentric/nutprec.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
diff --git a/src/structure/planetocentric/phaseangle.rs b/src/structure/planetocentric/phaseangle.rs index 0d256391..c26391da 100644 --- a/src/structure/planetocentric/phaseangle.rs +++ b/src/structure/planetocentric/phaseangle.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. diff --git a/src/structure/records/mod.rs b/src/structure/records/mod.rs deleted file mode 100644 index 538aa033..00000000 --- a/src/structure/records/mod.rs +++ /dev/null @@ -1,19 +0,0 @@ -/* - * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) - * This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at https://mozilla.org/MPL/2.0/. - * - * Documentation: https://nyxspace.com/ - */ -use der::{Decode, Encode}; - -use crate::prelude::AniseError; - -pub trait Record<'a>: Encode + Decode<'a> { - /// Returns whether or not the integrity of the data is correct. - fn check_integrity(&self) -> Result<(), AniseError> { - Ok(()) - } -} diff --git a/src/structure/save.rs b/src/structure/save.rs deleted file mode 100644 index 4672c4f9..00000000 --- a/src/structure/save.rs +++ /dev/null @@ -1,74 +0,0 @@ -/* - * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) - * This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at https://mozilla.org/MPL/2.0/. - * - * Documentation: https://nyxspace.com/ - */ - -use crate::errors::{AniseError, InternalErrorKind}; -use der::{Decode, Encode}; -use log::warn; -use std::fs::File; -use std::io::Write; -use std::path::Path; - -/// A trait to encode / decode ANISE specific data. -pub trait Asn1Serde<'a>: Encode + Decode<'a> { - /// Saves this context in the providef filename. - /// If overwrite is set to false, and the filename already exists, this function will return an error. - /// - /// TODO: This function should only be available with the alloc feature gate. - fn save_as(&self, filename: &'a str, overwrite: bool) -> Result<(), AniseError> { - match self.encoded_len() { - Err(e) => Err(AniseError::InternalError(e.into())), - Ok(length) => { - let len: u32 = length.into(); - // Fill the vector with zeros - let mut buf = vec![0x0; len as usize]; - self.save_as_via_buffer(filename, overwrite, &mut buf) - } - } - } - - /// Saves this context in the providef filename. - /// If overwrite is set to false, and the filename already exists, this function will return an error. 
- fn save_as_via_buffer( - &self, - filename: &'a str, - overwrite: bool, - buf: &mut [u8], - ) -> Result<(), AniseError> { - if Path::new(filename).exists() { - if !overwrite { - return Err(AniseError::FileExists); - } else { - warn!("[save_as] overwriting {filename}"); - } - } - - match File::create(filename) { - Ok(mut file) => { - if let Err(e) = self.encode_to_slice(buf) { - return Err(InternalErrorKind::Asn1Error(e).into()); - } - if let Err(e) = file.write_all(buf) { - Err(e.kind().into()) - } else { - Ok(()) - } - } - Err(e) => Err(e.kind().into()), - } - } - - /// Attempts to load this data from its bytes - fn try_from_bytes(bytes: &'a [u8]) -> Result { - match Self::from_der(bytes) { - Ok(yay) => Ok(yay), - Err(e) => Err(AniseError::DecodingError(e)), - } - } -} diff --git a/src/structure/semver.rs b/src/structure/semver.rs index 3d4bdc3d..13967e4c 100644 --- a/src/structure/semver.rs +++ b/src/structure/semver.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. diff --git a/src/structure/spacecraft/drag.rs b/src/structure/spacecraft/drag.rs index b713831f..28f0d878 100644 --- a/src/structure/spacecraft/drag.rs +++ b/src/structure/spacecraft/drag.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. diff --git a/src/structure/spacecraft/inertia.rs b/src/structure/spacecraft/inertia.rs index e16357c3..ae943798 100644 --- a/src/structure/spacecraft/inertia.rs +++ b/src/structure/spacecraft/inertia.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. diff --git a/src/structure/spacecraft/mass.rs b/src/structure/spacecraft/mass.rs index e5dcdba5..616642fd 100644 --- a/src/structure/spacecraft/mass.rs +++ b/src/structure/spacecraft/mass.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. diff --git a/src/structure/spacecraft/mod.rs b/src/structure/spacecraft/mod.rs index 8c1e9679..897b53fa 100644 --- a/src/structure/spacecraft/mod.rs +++ b/src/structure/spacecraft/mod.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. 
If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. @@ -38,7 +38,9 @@ pub struct SpacecraftData<'a> { pub inertia: Option, } -impl<'a> DataSetT<'a> for SpacecraftData<'a> {} +impl<'a> DataSetT<'a> for SpacecraftData<'a> { + const NAME: &'static str = "spacecraft data"; +} impl<'a> SpacecraftData<'a> { /// Specifies what data is available in this structure. diff --git a/src/structure/spacecraft/srp.rs b/src/structure/spacecraft/srp.rs index 92d4b302..084ebec2 100644 --- a/src/structure/spacecraft/srp.rs +++ b/src/structure/spacecraft/srp.rs @@ -1,6 +1,6 @@ /* * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) + * Copyright (C) 2021-2023 Christopher Rabotin et al. (cf. AUTHORS.md) * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. diff --git a/src/structure/spline/covkind.rs b/src/structure/spline/covkind.rs deleted file mode 100644 index 16d30144..00000000 --- a/src/structure/spline/covkind.rs +++ /dev/null @@ -1,188 +0,0 @@ -/* - * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) - * This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at https://mozilla.org/MPL/2.0/. - * - * Documentation: https://nyxspace.com/ - */ -use der::{Decode, Encode, Reader, Writer}; - -use crate::DBL_SIZE; - -use super::statekind::StateKind; - -/// Covariance Kind defines what kind of covariance is stored in the spline, if at all. -/// Under the hood, this works exactly like [StateKind] since the CovKind structure has a single field `data` which is a StateKind. -/// -/// # Storage requirements and field ordering -/// Covariance information requires more data than just a state since it includes both the covariance and the variance between different elements. -/// In ANISE, this is stored as an upper triangular matrix. -/// -/// ## Position variance storage -/// -/// The order of the data is as follows: -/// 1. cov_x_x -/// 2. cov_y_x -/// 3. cov_y_y -/// 4. cov_z_x -/// 5. cov_z_y -/// 6. cov_z_z -/// -/// Hence, if the covariance is interpolated with a degree 6, then the position covariance of a single spline is stored as a contiguous octet array of 288 octets: -/// -/// | field | length | start octet | end octet -/// | -- | -- | -- | -- | -/// | cov_x_x | 6*8 = 48 | 0 | 47 -/// | cov_y_x | 6*8 = 48 | 48 | 95 -/// | cov_y_y | 48 | 96 | 143 -/// | cov_z_x | 48 | 144 | 191 -/// | cov_z_y | 48 | 192 | 239 -/// | cov_z_z | 48 | 240 | 287 -/// -/// ### Example -/// Storing the position and velocity covariance, interpolated as a 6 degree polynomial will require **6** fields of **6 * 8 = 48* octets each, leading to **288 octets per spline**. -/// -/// ## Position and velocity variance storage -/// -/// It is not possible to store the velocity variance without also storing the position variance. If we've missed a use case where this is relevant, please open an issue. -/// -/// Additional fields for the velocity variance. 
-/// -/// + cov_vx_x -/// + cov_vx_y -/// + cov_vx_z -/// + cov_vx_vx -/// -/// + cov_vy_x -/// + cov_vy_y -/// + cov_vy_z -/// + cov_vy_vx -/// + cov_vy_vy -/// -/// + cov_vz_x -/// + cov_vz_y -/// + cov_vz_z -/// + cov_vz_vx -/// + cov_vz_vy -/// + cov_vz_vz -/// -/// ### Example -/// Storing the position and velocity covariance, interpolated as a 6 degree polynomial will require **6 + 15 = 21** fields of **6 * 8 = 48* octets each, leading to **1008 octets per spline**. -/// -/// ## Position, velocity, and acceleration variance storage -/// -/// We also don't know of a use case where one would need to store the variance of the acceleration, but it's supported because the support is relatively easy. -/// **Warning:** this will add 7+8+9 = 24 fields, each storing one 64-bit floating point number _per interpolation degree_. -/// -/// + cov_ax_x -/// + cov_ax_y -/// + cov_ax_z -/// + cov_ax_vx -/// + cov_ax_vy -/// + cov_ax_vz -/// + cov_ax_ax -/// -/// + cov_ay_x -/// + cov_ay_y -/// + cov_ay_z -/// + cov_ay_vx -/// + cov_ay_vy -/// + cov_ay_vz -/// + cov_ay_ax -/// + cov_ay_ay -/// -/// + cov_az_x -/// + cov_az_y -/// + cov_az_z -/// + cov_az_vx -/// + cov_az_vy -/// + cov_az_vz -/// + cov_az_ax -/// + cov_az_ay -/// + cov_az_az -/// -/// ### Example -/// Storing the full covariance of position, velocity, and acceleration, interpolated as a 6 degree polynomial will require **6 + 15 + 24 = 45** fields of **6 * 8 = 48* octets each, leading to **2160 octets per spline**. - -#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, Eq, Ord)] -pub struct CovKind { - pub(crate) data: StateKind, -} - -impl CovKind { - pub const fn is_empty(&self) -> bool { - self.len() == 0 - } - - /// Returns the length in octets required to store this covariance information - pub const fn len(&self) -> usize { - let num_items = match self.data { - StateKind::None => 0, - StateKind::Position { degree } => degree * 6, - StateKind::PositionVelocity { degree } => degree * (6 + 15), - StateKind::PositionVelocityAcceleration { degree } => degree * (6 + 15 + 21), - }; - DBL_SIZE * (num_items as usize) - } - - /// Returns the interpolation degree - pub const fn degree(&self) -> u8 { - match &self.data { - StateKind::None => 0, - StateKind::Position { degree } => *degree, - StateKind::PositionVelocity { degree } => *degree, - StateKind::PositionVelocityAcceleration { degree } => *degree, - } - } -} - -impl Default for CovKind { - fn default() -> Self { - Self { - data: StateKind::None, - } - } -} - -/// Allows conversion of the CovKind into a u8 with the following mapping. -impl From for u16 { - fn from(kind: CovKind) -> Self { - u16::from(kind.data) - } -} - -impl From<&CovKind> for u16 { - fn from(kind: &CovKind) -> Self { - u16::from(*kind) - } -} - -/// Allows conversion of a u8 into a CovKind. 
-impl From for CovKind { - fn from(val: u16) -> Self { - Self { - data: StateKind::from(val), - } - } -} - -impl Encode for CovKind { - fn encoded_len(&self) -> der::Result { - let converted: u16 = self.into(); - converted.encoded_len() - } - - fn encode(&self, encoder: &mut impl Writer) -> der::Result<()> { - let converted: u16 = self.into(); - converted.encode(encoder) - } -} - -impl<'a> Decode<'a> for CovKind { - fn decode>(decoder: &mut R) -> der::Result { - let converted: u16 = decoder.decode()?; - Ok(Self::from(converted)) - } -} diff --git a/src/structure/spline/evenness.rs b/src/structure/spline/evenness.rs deleted file mode 100644 index a3c1b98c..00000000 --- a/src/structure/spline/evenness.rs +++ /dev/null @@ -1,96 +0,0 @@ -/* - * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) - * This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at https://mozilla.org/MPL/2.0/. - * - * Documentation: https://nyxspace.com/ - */ -use der::{Decode, Encode, Reader, Tag, Writer}; - -use crate::DBL_SIZE; - -/// Splice Space defines whether this is an equal-time step interpolation spline (called `Even` splines in ANISE) or an unequal-time step spline (called `Uneven`). -/// -/// # Even splines -/// -/// These store data like what would typically be stored in the NAIF SPK Types 2, 3, 8, and 12. The interpolation of the trajectory is done over a fixed time window, e.g. 16 days. -/// In ANISE, a single interpolation spline must be less than 4000 years because the window duration is stored in nanoseconds on an unsigned integer. -/// -/// ## Querying -/// To query the set of coefficients needed for a given interpolation, the following algorithm applies given the desired epoch `epoch` as an input parameter. -/// 1. Compute `delta_ns`: the difference in nanoseconds between `epoch` and the ephemeris start epoch (making sure that both are in the same time system). -/// 2. Compute the spline index `spl_idx` by performing the integer division between `delta_ns` and the spline duration `duration_ns` (defined in the meta data of the splines). -/// 3. Seek through the byte string of the spline data by chunks of the spline length, which depends on the kind of data stored (Position, etc.) and the existence of not of covariance information. -/// -/// Defines the two kinds of spacing splines supports: equal time steps (fixed sized interpolation) or unequal time steps (also called sliding window) -#[derive(Copy, Clone, Debug, Eq, PartialEq)] -pub enum Evenness { - Even { - duration_ns: u64, - }, - Uneven { - /// Unevenly spaced window ephemerides may only span five centuries to constraint stack size - indexes: [i16; 5], // TODO: Consider 10? Or just enough for DE in full. - }, -} - -impl Evenness { - pub const fn is_empty(&self) -> bool { - self.len() == 0 - } - - /// Returns the length in octets that precedes the spline coefficient data. - /// - /// For example, if this is set to 16, it means that the spline coefficient data starts at an offset of 16 compared to the start of the spline itself. 
- pub const fn len(&self) -> usize { - DBL_SIZE - * match self { - Self::Even { duration_ns: _ } => 1, - Self::Uneven { indexes: _ } => 2, - } - } -} - -impl Default for Evenness { - fn default() -> Self { - Self::Even { duration_ns: 0 } - } -} - -impl Encode for Evenness { - fn encoded_len(&self) -> der::Result { - match self { - Self::Even { duration_ns } => (*duration_ns).encoded_len(), - Self::Uneven { indexes: _indexes } => { - todo!() - } - } - } - - fn encode(&self, encoder: &mut impl Writer) -> der::Result<()> { - match self { - Self::Even { duration_ns } => (*duration_ns).encode(encoder), - Self::Uneven { indexes: _indexes } => { - todo!() - } - } - } -} - -impl<'a> Decode<'a> for Evenness { - fn decode>(decoder: &mut R) -> der::Result { - // Check the header tag to decode this CHOICE - if decoder.peek_tag()? == Tag::Integer { - Ok(Self::Even { - duration_ns: decoder.decode()?, - }) - } else { - decoder.sequence(|sdecoder| { - let indexes = sdecoder.decode()?; - Ok(Self::Uneven { indexes }) - }) - } - } -} diff --git a/src/structure/spline/field.rs b/src/structure/spline/field.rs deleted file mode 100644 index b1d53f1b..00000000 --- a/src/structure/spline/field.rs +++ /dev/null @@ -1,135 +0,0 @@ -/* - * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) - * This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at https://mozilla.org/MPL/2.0/. - * - * Documentation: https://nyxspace.com/ - */ - -/// The fields that can be queried for spline data. -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub enum Field { - MidPoint, - Duration, - X, - Y, - Z, - Vx, - Vy, - Vz, - Ax, - Ay, - Az, - CovXX, - CovYZ, - CovYY, - CovZX, - CovZY, - CovZZ, - CovVxX, - CovVxY, - CovVxZ, - CovVxVx, - CovVyX, - CovVyY, - CovVyZ, - CovVyVx, - CovVyVy, - CovVzX, - CovVzY, - CovVzZ, - CovVzVx, - CovVzVy, - CovVzVz, - CovAxX, - CovAxY, - CovAxZ, - CovAxVx, - CovAxVy, - CovAxVz, - CovAxAx, - CovAyX, - CovAyY, - CovAyZ, - CovAyVx, - CovAyVy, - CovAyVz, - CovAyAx, - CovAyAy, - CovAzX, - CovAzY, - CovAzZ, - CovAzVx, - CovAzVy, - CovAzVz, - CovAzAx, - CovAzAy, - CovAzAz, -} - -impl Field { - pub const fn is_position(&self) -> bool { - matches!(self, Self::X | Self::Y | Self::Z) - } - - pub const fn is_velocity(&self) -> bool { - matches!(self, Self::Vx | Self::Vy | Self::Vz) - } - - pub const fn is_acceleration(&self) -> bool { - matches!(self, Self::Ax | Self::Ay | Self::Az) - } - - pub const fn is_covariance(&self) -> bool { - matches!( - self, - Self::CovXX - | Self::CovYZ - | Self::CovYY - | Self::CovZX - | Self::CovZY - | Self::CovZZ - | Self::CovVxX - | Self::CovVxY - | Self::CovVxZ - | Self::CovVxVx - | Self::CovVyX - | Self::CovVyY - | Self::CovVyZ - | Self::CovVyVx - | Self::CovVyVy - | Self::CovVzX - | Self::CovVzY - | Self::CovVzZ - | Self::CovVzVx - | Self::CovVzVy - | Self::CovVzVz - | Self::CovAxX - | Self::CovAxY - | Self::CovAxZ - | Self::CovAxVx - | Self::CovAxVy - | Self::CovAxVz - | Self::CovAxAx - | Self::CovAyX - | Self::CovAyY - | Self::CovAyZ - | Self::CovAyVx - | Self::CovAyVy - | Self::CovAyVz - | Self::CovAyAx - | Self::CovAyAy - | Self::CovAzX - | Self::CovAzY - | Self::CovAzZ - | Self::CovAzVx - | Self::CovAzVy - | Self::CovAzVz - | Self::CovAzAx - | Self::CovAzAy - | Self::CovAzAz - ) - } -} diff --git a/src/structure/spline/meta.rs b/src/structure/spline/meta.rs deleted file mode 100644 index 0a6ee3ac..00000000 --- 
a/src/structure/spline/meta.rs +++ /dev/null @@ -1,115 +0,0 @@ -/* - * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) - * This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at https://mozilla.org/MPL/2.0/. - * - * Documentation: https://nyxspace.com/ - */ -use der::{Decode, Encode, Reader, Writer}; - -use crate::{prelude::AniseError, DBL_SIZE}; - -use super::{covkind::CovKind, evenness::Evenness, statekind::StateKind, Field}; - -#[derive(Copy, Clone, Default, Debug, PartialEq, Eq)] - -pub struct SplineMeta { - /// Defines whether this is an evenly or unevenly timed spline - pub evenness: Evenness, - /// Defines what kind of state data is stored in this spline - pub state_kind: StateKind, - /// Defines what kind of covariance data is stored in this spline - pub cov_kind: CovKind, -} - -impl SplineMeta { - /// Returns the offset (in bytes) in the octet string - pub fn spline_offset(&self, idx: usize) -> usize { - idx * self.len() - } - - /// Returns the offset of this field in the spline given how this spline is set up. - /// This may return an error when requesting a field that is not available. - pub fn field_offset(&self, field: Field, coeff_idx: usize) -> Result { - // Make the field is valid in this spline. - if (self.cov_kind.is_empty() && field.is_covariance()) - || (!field.is_covariance() && self.state_kind.is_empty()) - { - Err(AniseError::ParameterNotSpecified) - } else { - // TODO Make sure the position data is also there. - // Padding from header (e.g. one double for even splines, two for uneven splines). - let header_padding = self.evenness.len(); - // Offset from the requested field (e.g. coefficients for X are stored before those for Y components). - let field_offset = match field { - Field::MidPoint => { - // Special case: the midpoint is always at the start of each spline. - return Ok(0); - } - Field::Duration => { - if header_padding == 2 { - // Special case: the duration of the spline is always the second item of each spline, if this spline type supports it - return Ok(DBL_SIZE); - } else { - return Err(AniseError::ParameterNotSpecified); - } - } - Field::X => 0, - Field::Y => 1, - Field::Z => 2, - Field::Vx => 3, - Field::Vy => 4, - Field::Vz => 5, - Field::Ax => 6, - Field::Ay => 7, - Field::Az => 8, - _ => unreachable!(), - }; - - // Offset to reach the correct coefficient given the index, e.g. to get the 3rd Y component, - // the total offset in the spline should be header_padding + 1 * num of coeffs + coefficient index. - Ok(header_padding - + field_offset * (self.state_kind.degree() as usize) * DBL_SIZE - + coeff_idx * DBL_SIZE) - } - } - - pub fn is_empty(&self) -> bool { - self.len() == 0 - } - - /// Returns the length of a spline in bytes - pub fn len(&self) -> usize { - self.evenness.len() + self.state_kind.len() + self.cov_kind.len() - } -} - -impl Encode for SplineMeta { - fn encoded_len(&self) -> der::Result { - self.evenness.encoded_len()? - + self.state_kind.encoded_len()? - + self.cov_kind.encoded_len()? 
- } - - fn encode(&self, encoder: &mut impl Writer) -> der::Result<()> { - self.evenness.encode(encoder)?; - self.state_kind.encode(encoder)?; - self.cov_kind.encode(encoder) - } -} - -impl<'a> Decode<'a> for SplineMeta { - fn decode>(decoder: &mut R) -> der::Result { - let spacing = decoder.decode()?; - let state_kind = decoder.decode()?; - let cov_kind = decoder.decode()?; - - Ok(Self { - evenness: spacing, - state_kind, - cov_kind, - }) - } -} diff --git a/src/structure/spline/mod.rs b/src/structure/spline/mod.rs deleted file mode 100644 index 446f73e9..00000000 --- a/src/structure/spline/mod.rs +++ /dev/null @@ -1,22 +0,0 @@ -/* - * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) - * This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at https://mozilla.org/MPL/2.0/. - * - * Documentation: https://nyxspace.com/ - */ - -mod covkind; -pub use covkind::*; -mod evenness; -pub use evenness::*; -mod meta; -pub use meta::*; -mod splines; -pub use splines::*; -mod statekind; -pub use statekind::*; -mod field; -pub use field::*; diff --git a/src/structure/spline/splines.rs b/src/structure/spline/splines.rs deleted file mode 100644 index 9a443160..00000000 --- a/src/structure/spline/splines.rs +++ /dev/null @@ -1,62 +0,0 @@ -/* - * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) - * This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at https://mozilla.org/MPL/2.0/. - * - * Documentation: https://nyxspace.com/ - */ -use der::{asn1::OctetStringRef, Decode, Encode, Length, Reader, Writer}; - -use super::meta::SplineMeta; - -// #[derive(Enumerated)] -// #[repr(u8)] -// pub enum TruncationStrategy { -// None = 0, -// TruncateLow = 1, -// TruncateHigh = 2, -// } - -// WARNING: How do I specify the start and end epochs for variable sized windows where the duration in the window is needed to rebuild the state? -// Is that some kind of header? If so, what's its size? If it's a high precision epoch, it would be 80 bits, but more likely people will provide 64 bit floats. -// Also, I can't use an offset from the index because the splines are built separately from the index via multithreading, so that would be difficult to build (would need to mutate the spline prior to encoding) - -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct Splines<'a> { - /// Metadata of the spline - pub metadata: SplineMeta, - // use AsBytes / FromBytes from "zerocopy" crate to load the data ? - /// Stores the CRC32 checksum of the data octet string. - pub data_checksum: u32, // TODO: move the checksum into a CRC32DataArray to check integrity on load - /// The data as a packed struct of octets - pub data: &'a [u8], -} - -impl<'a> Encode for Splines<'a> { - fn encoded_len(&self) -> der::Result { - self.metadata.encoded_len()? - + self.data_checksum.encoded_len()? - + OctetStringRef::new(self.data).unwrap().encoded_len()? 
- } - - fn encode(&self, encoder: &mut impl Writer) -> der::Result<()> { - self.metadata.encode(encoder)?; - self.data_checksum.encode(encoder)?; - OctetStringRef::new(self.data).unwrap().encode(encoder) - } -} - -impl<'a> Decode<'a> for Splines<'a> { - fn decode>(decoder: &mut R) -> der::Result { - let metadata = decoder.decode()?; - let data_checksum = decoder.decode()?; - let data_bytes: OctetStringRef = decoder.decode()?; - Ok(Self { - metadata, - data_checksum, - data: data_bytes.as_bytes(), - }) - } -} diff --git a/src/structure/spline/statekind.rs b/src/structure/spline/statekind.rs deleted file mode 100644 index 4ea0b5a1..00000000 --- a/src/structure/spline/statekind.rs +++ /dev/null @@ -1,173 +0,0 @@ -/* - * ANISE Toolkit - * Copyright (C) 2021-2022 Christopher Rabotin et al. (cf. AUTHORS.md) - * This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at https://mozilla.org/MPL/2.0/. - * - * Documentation: https://nyxspace.com/ - */ -use der::{Decode, Encode, Reader, Writer}; - -use crate::DBL_SIZE; - -/// State Kind defines what kind of state is stored in the spline. -/// -/// # Limitations -/// 1. The interpolation degree of all items in the state must be identical. -/// 2. A constant position must be encoded as degree 1 whose sole coefficient is the constant value. -/// -/// ## Example -/// If the position is interpolated with an 11 degree polynomial, and the velocity must also be interpolated with an 11 degree polynomial. -/// -/// # Encoding / decoding -/// The state kind is encoded along with its degree as a single field in the ASN1 encoding scheme. -/// The interpolation degrees are expressed on an 8-bit unsigned integer whose maximum value is 255 (2^8 - 1). -/// Hence, to encode both the state kind and the interpolation degree, a spacing of 255 is used between each state kind. -/// -/// ASN1 encodes the tag and length as one octet each. Hence, position state metadata will always fit in exactly three octets: tag (1), length (1), degree (1). -/// Position and velocity data will fit in four octets: tag (1), length (1), data (2). And so on for each state kind. -/// Had the degree and state kind been stored as separate fields, we would be constantly using exactly six octets. -/// -/// The other advantage is that a single ASN1 tag decoding will yield both the state kind and the degree: this allows the code to store both the state kind and the degree in the same enumerate structure. -/// -/// ## Example -/// -/// | Encoded value | State Kind | Degree | -/// | -- | -- | -- | -/// | 1 | Position only | 1 -/// | 28 | Position only | 28 -/// | 266 | Position and Velocity | 266-255 = 11 -/// | 0 | None | _not applicable_ -/// -/// # Storage -/// -/// Position data will always require three fields (x, y, z). Velocity adds another three (vx, vy, vz), and so does acceleration (ax, ay, az). -/// -/// ## Example -/// Storing the position and velocity with an 11 degree polynomial will require 11*6 = 66 coefficient. Each coefficient is stored as packed structure of 8 octets floating point values in IEEE754 format. -/// Hence, this would require 66*8 = 528 octets per spline. -/// -#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, Eq, Ord)] -pub enum StateKind { - /// No state data at all, i.e. 
this spline only has covariance information - None, - Position { - degree: u8, - }, - PositionVelocity { - degree: u8, - }, - PositionVelocityAcceleration { - degree: u8, - }, -} - -impl StateKind { - pub fn is_empty(&self) -> bool { - self.len() == 0 - } - - /// Returns the length in octets required to store this covariance information - pub fn len(&self) -> usize { - let num_items = match self { - StateKind::None => 0, - StateKind::Position { degree } => degree * 3, - StateKind::PositionVelocity { degree } => degree * 6, - StateKind::PositionVelocityAcceleration { degree } => degree * 9, - }; - DBL_SIZE * (num_items as usize) - } - - /// Returns the interpolation degree - pub const fn degree(&self) -> u8 { - match self { - StateKind::None => 0, - StateKind::Position { degree } => *degree, - StateKind::PositionVelocity { degree } => *degree, - StateKind::PositionVelocityAcceleration { degree } => *degree, - } - } - - /// Returns whether this state kind includes the position interpolation coefficients - pub const fn includes_position(&self) -> bool { - !matches!(self, Self::None) - } - - /// Returns whether this state kind includes velocity interpolation coefficients - pub const fn includes_velocity(&self) -> bool { - matches!( - self, - Self::PositionVelocity { degree: _ } | Self::PositionVelocityAcceleration { degree: _ } - ) - } - - /// Returns whether this state kind includes acceleration interpolation coefficients. - pub const fn includes_acceleration(&self) -> bool { - matches!(self, Self::PositionVelocityAcceleration { degree: _ }) - } -} - -impl Default for StateKind { - fn default() -> Self { - Self::None - } -} - -/// Allows conversion of the StateKind into a u8 with the following mapping. -impl From for u16 { - fn from(kind: StateKind) -> Self { - match kind { - StateKind::None => 0, - StateKind::Position { degree } => degree.into(), - StateKind::PositionVelocity { degree } => (u8::MAX + degree).into(), - StateKind::PositionVelocityAcceleration { degree } => { - 2_u16 * (u8::MAX as u16) + (degree as u16) - } - } - } -} - -impl From<&StateKind> for u16 { - fn from(kind: &StateKind) -> Self { - u16::from(*kind) - } -} - -/// Allows conversion of a u8 into a StateKind. -impl From for StateKind { - fn from(val: u16) -> Self { - if val == 0 { - Self::None - } else { - // Prevents an overflow and coerces the degree to be within the bounds of a u8, as per the specs. 
- let degree = (val % (u8::MAX as u16)) as u8; - if val < u8::MAX.into() { - Self::Position { degree } - } else if val < 2 * (u8::MAX as u16) { - Self::PositionVelocity { degree } - } else { - Self::PositionVelocityAcceleration { degree } - } - } - } -} - -impl Encode for StateKind { - fn encoded_len(&self) -> der::Result { - let converted: u16 = self.into(); - converted.encoded_len() - } - - fn encode(&self, encoder: &mut impl Writer) -> der::Result<()> { - let converted: u16 = self.into(); - converted.encode(encoder) - } -} - -impl<'a> Decode<'a> for StateKind { - fn decode>(decoder: &mut R) -> der::Result { - let converted: u16 = decoder.decode()?; - Ok(Self::from(converted)) - } -} diff --git a/tests/astro/mod.rs b/tests/astro/mod.rs index 9c11bfa5..2d79e173 100644 --- a/tests/astro/mod.rs +++ b/tests/astro/mod.rs @@ -1 +1 @@ -// mod orbit; +mod orbit; diff --git a/tests/astro/orbit.rs b/tests/astro/orbit.rs index da176aa0..7438ef27 100644 --- a/tests/astro/orbit.rs +++ b/tests/astro/orbit.rs @@ -1,36 +1,46 @@ extern crate pretty_env_logger as pel; use anise::astro::orbit::Orbit; -use anise::astro::orbit_geodetic::GeodeticOrbit; -use anise::file_mmap; +use anise::constants::frames::EARTH_J2000; use anise::math::angles::{between_0_360, between_pm_180}; +use anise::naif::kpl::parser::convert_tpc; use anise::prelude::*; use anise::time::{Epoch, Unit}; +use rstest::*; + +#[fixture] +fn almanac<'a>() -> Almanac<'a> { + let mut ctx = Almanac::default(); + + ctx.planetary_data = convert_tpc("data/pck00008.tpc", "data/gm_de431.tpc").unwrap(); + ctx +} + macro_rules! f64_eq { ($x:expr, $val:expr, $msg:expr) => { assert!( ($x - $val).abs() < 1e-10, - "{}: {:.2e}", + "{}: {:.2e}\tgot: {}\twant: {}", $msg, - ($x - $val).abs() + ($x - $val).abs(), + $x, + $val ) }; } -#[test] -fn state_def_circ_inc() { - let path = "./data/de438s.anise"; - let buf = file_mmap!(path).unwrap(); - let ctx = DataSet::try_from_bytes(&buf).unwrap(); - - let eme2k = ctx.celestial_frame("Earth", "J2000").unwrap(); +#[rstest] +fn val_state_def_circ_inc(almanac: Almanac) { + let mut eme2k = almanac.frame_from_uid(EARTH_J2000).unwrap(); + // Set the GM value from the GMAT data since we're validating the calculations against GMAT. 
+ eme2k.mu_km3_s2 = Some(398_600.4415); let epoch = Epoch::from_mjd_tai(21_545.0); - let cart = Orbit::cartesian( + let cart = Orbit::new( -2436.45, -2436.45, 6891.037, 5.088_611, -5.088_611, 0.0, epoch, eme2k, ); - let cart2 = Orbit::cartesian( + let cart2 = Orbit::new( -2436.45, -2436.45, 6891.037, @@ -50,28 +60,33 @@ fn state_def_circ_inc() { f64_eq!(cart.velocity_km_s.x, 5.088_611, "vx"); f64_eq!(cart.velocity_km_s.y, -5.088_611, "vy"); f64_eq!(cart.velocity_km_s.z, 0.0, "vz"); - f64_eq!(cart.energy_km2_s2(), -25.842_247_282_849_137, "energy"); + f64_eq!( + cart.energy_km2_s2().unwrap(), + -25.842_247_282_849_137, + "energy" + ); + assert_eq!( - cart.period(), - 6_740.269_063_643_045 * Unit::Second, + cart.period().unwrap(), + 6_740.269_063_641 * Unit::Second, "period" ); - f64_eq!(cart.hx(), 35_065.806_679_607_005, "HX"); - f64_eq!(cart.hy(), 35_065.806_679_607_005, "HY"); - f64_eq!(cart.hz(), 24_796.292_541_9, "HZ"); - f64_eq!(cart.sma_km(), 7_712.186_117_895_043, "sma"); - f64_eq!(cart.ecc(), 0.000_999_582_831_432_052_5, "ecc"); - f64_eq!(cart.inc_deg(), 63.434_003_407_751_14, "inc"); - f64_eq!(cart.raan_deg(), 135.0, "raan"); - f64_eq!(cart.aop_deg(), 90.0, "aop"); - f64_eq!(cart.ta_deg(), 0.0, "ta"); - f64_eq!(cart.tlong_deg(), 225.0, "tlong"); - f64_eq!(cart.ea_deg(), 0.0, "ea"); - f64_eq!(cart.ma_deg(), 0.0, "ma"); - f64_eq!(cart.apoapsis_km(), 7_719.895_086_731_299, "apo"); - f64_eq!(cart.periapsis_km(), 7_704.477_149_058_786, "peri"); + f64_eq!(cart.hx().unwrap(), 35_065.806_679_607_005, "HX"); + f64_eq!(cart.hy().unwrap(), 35_065.806_679_607_005, "HY"); + f64_eq!(cart.hz().unwrap(), 24_796.292_541_9, "HZ"); + f64_eq!(cart.sma_km().unwrap(), 7_712.186_117_895_043, "sma"); + f64_eq!(cart.ecc().unwrap(), 0.000_999_582_831_432_052_5, "ecc"); + f64_eq!(cart.inc_deg().unwrap(), 63.434_003_407_751_14, "inc"); + f64_eq!(cart.raan_deg().unwrap(), 135.0, "raan"); + f64_eq!(cart.aop_deg().unwrap(), 90.0, "aop"); + f64_eq!(cart.ta_deg().unwrap(), 0.0, "ta"); + f64_eq!(cart.tlong_deg().unwrap(), 225.0, "tlong"); + f64_eq!(cart.ea_deg().unwrap(), 0.0, "ea"); + f64_eq!(cart.ma_deg().unwrap(), 0.0, "ma"); + f64_eq!(cart.apoapsis_km().unwrap(), 7_719.895_086_731_299, "apo"); + f64_eq!(cart.periapsis_km().unwrap(), 7_704.477_149_058_786, "peri"); f64_eq!( - cart.semi_parameter_km(), + cart.semi_parameter_km().unwrap(), 7_712.178_412_142_147, "semi parameter" ); @@ -85,24 +100,32 @@ fn state_def_circ_inc() { f64_eq!(kep.velocity_km_s.x, -0.166_470_488_584_076_31, "vx"); f64_eq!(kep.velocity_km_s.y, 6.913_868_638_275_646_5, "vy"); f64_eq!(kep.velocity_km_s.z, 0.910_157_981_443_279_1, "vz"); - f64_eq!(kep.sma_km(), 8_191.929_999_999_999, "sma"); - f64_eq!(kep.ecc(), 1.000_000_000_388_51e-06, "ecc"); - f64_eq!(kep.inc_deg(), 12.849_999_999_999_987, "inc"); - f64_eq!(kep.raan_deg(), 306.614, "raan"); - f64_eq!(kep.aop_deg(), 314.189_999_994_618_1, "aop"); - f64_eq!(kep.ta_deg(), 99.887_700_005_381_9, "ta"); - f64_eq!(kep.energy_km2_s2(), -24.328_848_116_377_95, "energy"); - assert_eq!(kep.period(), 7_378.877_993_957_958 * Unit::Second, "period"); - f64_eq!(kep.hx(), -10_200.784_799_426_574, "HX"); - f64_eq!(kep.hy(), -7_579.639_346_783_497, "HY"); - f64_eq!(kep.hz(), 55_711.757_929_384_25, "HZ"); - f64_eq!(kep.tlong_deg(), 0.691_700_000_000_082_6, "tlong"); - f64_eq!(kep.ea_deg(), 99.887_643_560_656_85, "ea"); - f64_eq!(kep.ma_deg(), 99.887_587_115_926_96, "ma"); - f64_eq!(kep.apoapsis_km(), 8_191.938_191_930_002, "apo"); - f64_eq!(kep.periapsis_km(), 8_191.921_808_069_997, "peri"); + 
f64_eq!(kep.sma_km().unwrap(), 8_191.929_999_999_999, "sma"); + f64_eq!(kep.ecc().unwrap(), 1.000_000_000_388_51e-06, "ecc"); + f64_eq!(kep.inc_deg().unwrap(), 12.849_999_999_999_987, "inc"); + f64_eq!(kep.raan_deg().unwrap(), 306.614, "raan"); + f64_eq!(kep.aop_deg().unwrap(), 314.189_999_994_618_1, "aop"); + f64_eq!(kep.ta_deg().unwrap(), 99.887_700_005_381_9, "ta"); + f64_eq!( + kep.energy_km2_s2().unwrap(), + -24.328_848_116_377_95, + "energy" + ); + assert_eq!( + kep.period().unwrap(), + 7_378.877_993_955 * Unit::Second, + "period" + ); + f64_eq!(kep.hx().unwrap(), -10_200.784_799_426_574, "HX"); + f64_eq!(kep.hy().unwrap(), -7_579.639_346_783_497, "HY"); + f64_eq!(kep.hz().unwrap(), 55_711.757_929_384_25, "HZ"); + f64_eq!(kep.tlong_deg().unwrap(), 0.691_700_000_000_082_6, "tlong"); + f64_eq!(kep.ea_deg().unwrap(), 99.887_643_560_656_85, "ea"); + f64_eq!(kep.ma_deg().unwrap(), 99.887_587_115_926_96, "ma"); + f64_eq!(kep.apoapsis_km().unwrap(), 8_191.938_191_930_002, "apo"); + f64_eq!(kep.periapsis_km().unwrap(), 8_191.921_808_069_997, "peri"); f64_eq!( - kep.semi_parameter_km(), + kep.semi_parameter_km().unwrap(), 8_191.929_999_991_808, "semi parameter" ); @@ -110,7 +133,7 @@ fn state_def_circ_inc() { let kep = Orbit::keplerian( 8_191.93, 0.2, 12.85, 306.614, 314.19, -99.887_7, epoch, eme2k, ); - f64_eq!(kep.ta_deg(), 260.1123, "ta"); + f64_eq!(kep.ta_deg().unwrap(), 260.1123, "ta"); // // Test that DCMs are valid // let dcm = kep.dcm_from_traj_frame(Frame::VNC).unwrap(); @@ -126,16 +149,14 @@ fn state_def_circ_inc() { // assert!(((dcm.transpose() * dcm).determinant() - 1.0).abs() < 1e-12); } -#[test] -fn state_def_elliptical() { - let path = "./data/de438s.anise"; - let buf = file_mmap!(path).unwrap(); - let ctx = DataSet::try_from_bytes(&buf).unwrap(); - - let eme2k = ctx.celestial_frame("Earth", "J2000").unwrap(); +#[rstest] +fn val_state_def_elliptical(almanac: Almanac) { + let mut eme2k = almanac.frame_from_uid(EARTH_J2000).unwrap(); + // Set the GM value from the GMAT data since we're validating the calculations against GMAT. 
+ eme2k.mu_km3_s2 = Some(398_600.4415); let epoch = Epoch::from_mjd_tai(21_545.0); - let cart = Orbit::cartesian( + let cart = Orbit::new( 5_946.673_548_288_958, 1_656.154_606_023_661, 2_259.012_129_598_249, @@ -145,28 +166,32 @@ fn state_def_elliptical() { epoch, eme2k, ); - f64_eq!(cart.energy_km2_s2(), -25.842_247_282_849_144, "energy"); + f64_eq!( + cart.energy_km2_s2().unwrap(), + -25.842_247_282_849_144, + "energy" + ); assert_eq!( - cart.period(), - 6_740.269_063_643_042_5 * Unit::Second, + cart.period().unwrap(), + 6_740.269_063_641 * Unit::Second, "period" ); - f64_eq!(cart.hx(), 0.015_409_898_034_704_383, "HX"); - f64_eq!(cart.hy(), -44_146.106_010_690_01, "HY"); - f64_eq!(cart.hz(), 32_364.892_694_481_765, "HZ"); - f64_eq!(cart.sma_km(), 7_712.186_117_895_041, "sma"); - f64_eq!(cart.ecc(), 0.158_999_999_999_999_95, "ecc"); - f64_eq!(cart.inc_deg(), 53.753_69, "inc"); - f64_eq!(cart.raan_deg(), 1.998_632_864_211_17e-05, "raan"); - f64_eq!(cart.aop_deg(), 359.787_880_000_004, "aop"); - f64_eq!(cart.ta_deg(), 25.434_003_407_751_188, "ta"); - f64_eq!(cart.tlong_deg(), 25.221_903_394_083_824, "tlong"); - f64_eq!(cart.ea_deg(), 21.763_052_882_584_79, "ea"); - f64_eq!(cart.ma_deg(), 18.385_336_330_516_39, "ma"); - f64_eq!(cart.apoapsis_km(), 8_938.423_710_640_353, "apo"); - f64_eq!(cart.periapsis_km(), 6_485.948_525_149_73, "peri"); + f64_eq!(cart.hx().unwrap(), 0.015_409_898_034_704_383, "HX"); + f64_eq!(cart.hy().unwrap(), -44_146.106_010_690_01, "HY"); + f64_eq!(cart.hz().unwrap(), 32_364.892_694_481_765, "HZ"); + f64_eq!(cart.sma_km().unwrap(), 7_712.186_117_895_041, "sma"); + f64_eq!(cart.ecc().unwrap(), 0.158_999_999_999_999_95, "ecc"); + f64_eq!(cart.inc_deg().unwrap(), 53.753_69, "inc"); + f64_eq!(cart.raan_deg().unwrap(), 1.998_632_864_211_17e-05, "raan"); + f64_eq!(cart.aop_deg().unwrap(), 359.787_880_000_004, "aop"); + f64_eq!(cart.ta_deg().unwrap(), 25.434_003_407_751_188, "ta"); + f64_eq!(cart.tlong_deg().unwrap(), 25.221_903_394_083_824, "tlong"); + f64_eq!(cart.ea_deg().unwrap(), 21.763_052_882_584_79, "ea"); + f64_eq!(cart.ma_deg().unwrap(), 18.385_336_330_516_39, "ma"); + f64_eq!(cart.apoapsis_km().unwrap(), 8_938.423_710_640_353, "apo"); + f64_eq!(cart.periapsis_km().unwrap(), 6_485.948_525_149_73, "peri"); f64_eq!( - cart.semi_parameter_km(), + cart.semi_parameter_km().unwrap(), 7_517.214_340_648_537, "semi parameter" ); @@ -180,24 +205,32 @@ fn state_def_elliptical() { f64_eq!(kep.velocity_km_s.x, -0.000_168_592_186_843_952_16, "vx"); f64_eq!(kep.velocity_km_s.y, 6.886_845_792_370_852, "vy"); f64_eq!(kep.velocity_km_s.z, 0.936_931_260_302_891_8, "vz"); - f64_eq!(kep.sma_km(), 8_191.930_000_000_003, "sma"); - f64_eq!(kep.ecc(), 0.024_500_000_000_000_348, "ecc"); - f64_eq!(kep.inc_deg(), 12.850_000_000_000_016, "inc"); - f64_eq!(kep.raan_deg(), 306.614, "raan"); - f64_eq!(kep.aop_deg(), 314.190_000_000_000_4, "aop"); - f64_eq!(kep.ta_deg(), 99.887_699_999_999_58, "ta"); - f64_eq!(kep.energy_km2_s2(), -24.328_848_116_377_94, "energy"); - assert_eq!(kep.period(), 7_378.877_993_957_964 * Unit::Second, "period"); - f64_eq!(kep.hx(), -10_197.722_829_337_885, "HX"); - f64_eq!(kep.hy(), -7_577.364_166_057_776, "HY"); - f64_eq!(kep.hz(), 55_695.034_928_191_49, "HZ"); - f64_eq!(kep.tlong_deg(), 0.691_699_999_999_855_2, "tlong"); - f64_eq!(kep.ea_deg(), 98.501_748_370_880_22, "ea"); - f64_eq!(kep.ma_deg(), 97.113_427_049_323_43, "ma"); - f64_eq!(kep.apoapsis_km(), 8_392.632_285_000_007, "apo"); - f64_eq!(kep.periapsis_km(), 7_991.227_715_000_001, "peri"); + 
f64_eq!(kep.sma_km().unwrap(), 8_191.930_000_000_003, "sma"); + f64_eq!(kep.ecc().unwrap(), 0.024_500_000_000_000_348, "ecc"); + f64_eq!(kep.inc_deg().unwrap(), 12.850_000_000_000_016, "inc"); + f64_eq!(kep.raan_deg().unwrap(), 306.614, "raan"); + f64_eq!(kep.aop_deg().unwrap(), 314.190_000_000_000_4, "aop"); + f64_eq!(kep.ta_deg().unwrap(), 99.887_699_999_999_58, "ta"); + f64_eq!( + kep.energy_km2_s2().unwrap(), + -24.328_848_116_377_94, + "energy" + ); + assert_eq!( + kep.period().unwrap(), + 7_378.877_993_955 * Unit::Second, + "period" + ); + f64_eq!(kep.hx().unwrap(), -10_197.722_829_337_885, "HX"); + f64_eq!(kep.hy().unwrap(), -7_577.364_166_057_776, "HY"); + f64_eq!(kep.hz().unwrap(), 55_695.034_928_191_49, "HZ"); + f64_eq!(kep.tlong_deg().unwrap(), 0.691_699_999_999_855_2, "tlong"); + f64_eq!(kep.ea_deg().unwrap(), 98.501_748_370_880_22, "ea"); + f64_eq!(kep.ma_deg().unwrap(), 97.113_427_049_323_43, "ma"); + f64_eq!(kep.apoapsis_km().unwrap(), 8_392.632_285_000_007, "apo"); + f64_eq!(kep.periapsis_km().unwrap(), 7_991.227_715_000_001, "peri"); f64_eq!( - kep.semi_parameter_km(), + kep.semi_parameter_km().unwrap(), 8_187.012_794_017_503, "semi parameter" ); @@ -216,16 +249,14 @@ fn state_def_elliptical() { // assert!(((dcm.transpose() * dcm).determinant() - 1.0).abs() < 1e-12); } -#[test] -fn state_def_circ_eq() { - let path = "./data/de438s.anise"; - let buf = file_mmap!(path).unwrap(); - let ctx = DataSet::try_from_bytes(&buf).unwrap(); - - let eme2k = ctx.celestial_frame("Earth", "J2000").unwrap(); +#[rstest] +fn val_state_def_circ_eq(almanac: Almanac) { + let mut eme2k = almanac.frame_from_uid(EARTH_J2000).unwrap(); + // Set the GM value from the GMAT data since we're validating the calculations against GMAT. + eme2k.mu_km3_s2 = Some(398_600.4415); let epoch = Epoch::from_mjd_tai(21_545.0); - let cart = Orbit::cartesian( + let cart = Orbit::new( -38_892.724_449_149_02, 16_830.384_772_891_86, 0.722_659_929_135_562_2, @@ -235,24 +266,32 @@ fn state_def_circ_eq() { epoch, eme2k, ); - f64_eq!(cart.energy_km2_s2(), -4.702_902_670_552_006, "energy"); - assert_eq!(cart.period(), 86_820.776_152_986_1 * Unit::Second, "period"); - f64_eq!(cart.hx(), 2.225_951_522_241_969_5, "HX"); - f64_eq!(cart.hy(), -0.436_714_326_090_944_6, "HY"); - f64_eq!(cart.hz(), 129_969.001_391_865_75, "HZ"); - f64_eq!(cart.sma_km(), 42_378.129_999_999_98, "sma"); - f64_eq!(cart.ecc(), 9.999_999_809_555_511e-9, "ecc"); - f64_eq!(cart.inc_deg(), 0.001_000_000_401_564_538_6, "inc"); - f64_eq!(cart.raan_deg(), 78.9, "raan"); - f64_eq!(cart.aop_deg(), 65.399_999_847_186_78, "aop"); - f64_eq!(cart.ta_deg(), 12.300_000_152_813_197, "ta"); - f64_eq!(cart.tlong_deg(), 156.599_999_999_999_97, "tlong"); - f64_eq!(cart.ea_deg(), 12.300_000_030_755_777, "ea"); - f64_eq!(cart.ma_deg(), 12.299_999_908_698_359, "ma"); - f64_eq!(cart.apoapsis_km(), 42_378.130_423_781_27, "apo"); - f64_eq!(cart.periapsis_km(), 42_378.129_576_218_69, "peri"); f64_eq!( - cart.semi_parameter_km(), + cart.energy_km2_s2().unwrap(), + -4.702_902_670_552_006, + "energy" + ); + assert_eq!( + cart.period().unwrap(), + 86_820.776_152_981 * Unit::Second, + "period" + ); + f64_eq!(cart.hx().unwrap(), 2.225_951_522_241_969_5, "HX"); + f64_eq!(cart.hy().unwrap(), -0.436_714_326_090_944_6, "HY"); + f64_eq!(cart.hz().unwrap(), 129_969.001_391_865_75, "HZ"); + f64_eq!(cart.sma_km().unwrap(), 42_378.129_999_999_98, "sma"); + f64_eq!(cart.ecc().unwrap(), 9.999_999_809_555_511e-9, "ecc"); + f64_eq!(cart.inc_deg().unwrap(), 0.001_000_000_401_564_538_6, "inc"); + 
f64_eq!(cart.raan_deg().unwrap(), 78.9, "raan"); + f64_eq!(cart.aop_deg().unwrap(), 65.399_999_847_186_78, "aop"); + f64_eq!(cart.ta_deg().unwrap(), 12.300_000_152_813_197, "ta"); + f64_eq!(cart.tlong_deg().unwrap(), 156.599_999_999_999_97, "tlong"); + f64_eq!(cart.ea_deg().unwrap(), 12.300_000_030_755_777, "ea"); + f64_eq!(cart.ma_deg().unwrap(), 12.299_999_908_698_359, "ma"); + f64_eq!(cart.apoapsis_km().unwrap(), 42_378.130_423_781_27, "apo"); + f64_eq!(cart.periapsis_km().unwrap(), 42_378.129_576_218_69, "peri"); + f64_eq!( + cart.semi_parameter_km().unwrap(), 42_378.129_999_999_976, "semi parameter" ); @@ -264,28 +303,32 @@ fn state_def_circ_eq() { f64_eq!(kep.velocity_km_s.x, 0.030_396_440_130_264_88, "vx"); f64_eq!(kep.velocity_km_s.y, 4.680_909_107_924_576, "vy"); f64_eq!(kep.velocity_km_s.z, 4.907_089_816_726_583e-8, "vz"); - f64_eq!(kep.sma_km(), 18_191.098_000_000_013, "sma"); - f64_eq!(kep.ecc(), 9.999_999_997_416_087e-7, "ecc"); - f64_eq!(kep.inc_deg(), 1.207_418_269_725_733_3e-6, "inc"); - f64_eq!(kep.raan_deg(), 306.543, "raan"); - f64_eq!(kep.aop_deg(), 314.320_000_025_403_66, "aop"); - f64_eq!(kep.ta_deg(), 98.764_999_974_596_28, "ta"); - f64_eq!(kep.energy_km2_s2(), -10.955_920_349_063_035, "energy"); + f64_eq!(kep.sma_km().unwrap(), 18_191.098_000_000_013, "sma"); + f64_eq!(kep.ecc().unwrap(), 9.999_999_997_416_087e-7, "ecc"); + f64_eq!(kep.inc_deg().unwrap(), 1.207_418_269_725_733_3e-6, "inc"); + f64_eq!(kep.raan_deg().unwrap(), 306.543, "raan"); + f64_eq!(kep.aop_deg().unwrap(), 314.320_000_025_403_66, "aop"); + f64_eq!(kep.ta_deg().unwrap(), 98.764_999_974_596_28, "ta"); + f64_eq!( + kep.energy_km2_s2().unwrap(), + -10.955_920_349_063_035, + "energy" + ); assert_eq!( - kep.period(), - 24_417.396_242_570_256 * Unit::Second, + kep.period().unwrap(), + 24_417.396_242_566 * Unit::Second, "period" ); - f64_eq!(kep.hx(), -0.001_194_024_028_558_358_7, "HX"); - f64_eq!(kep.hy(), -0.000_884_918_835_027_750_6, "HY"); - f64_eq!(kep.hz(), 85_152.684_597_507_06, "HZ"); - f64_eq!(kep.tlong_deg(), 359.627_999_999_999_93, "tlong"); - f64_eq!(kep.ea_deg(), 98.764_943_347_932_57, "ea"); - f64_eq!(kep.ma_deg(), 98.764_886_721_264_56, "ma"); - f64_eq!(kep.apoapsis_km(), 18_191.116_191_098_008, "apo"); - f64_eq!(kep.periapsis_km(), 18_191.079_808_902_017, "peri"); + f64_eq!(kep.hx().unwrap(), -0.001_194_024_028_558_358_7, "HX"); + f64_eq!(kep.hy().unwrap(), -0.000_884_918_835_027_750_6, "HY"); + f64_eq!(kep.hz().unwrap(), 85_152.684_597_507_06, "HZ"); + f64_eq!(kep.tlong_deg().unwrap(), 359.627_999_999_999_93, "tlong"); + f64_eq!(kep.ea_deg().unwrap(), 98.764_943_347_932_57, "ea"); + f64_eq!(kep.ma_deg().unwrap(), 98.764_886_721_264_56, "ma"); + f64_eq!(kep.apoapsis_km().unwrap(), 18_191.116_191_098_008, "apo"); + f64_eq!(kep.periapsis_km().unwrap(), 18_191.079_808_902_017, "peri"); f64_eq!( - kep.semi_parameter_km(), + kep.semi_parameter_km().unwrap(), 18_191.097_999_981_823, "semi parameter" ); @@ -304,16 +347,14 @@ fn state_def_circ_eq() { // assert!(((dcm.transpose() * dcm).determinant() - 1.0).abs() < 1e-12); } -#[test] -fn state_def_equatorial() { - let path = "./data/de438s.anise"; - let buf = file_mmap!(path).unwrap(); - let ctx = DataSet::try_from_bytes(&buf).unwrap(); - - let eme2k = ctx.celestial_frame("Earth", "J2000").unwrap(); +#[rstest] +fn val_state_def_equatorial(almanac: Almanac) { + let mut eme2k = almanac.frame_from_uid(EARTH_J2000).unwrap(); + // Set the GM value from the GMAT data since we're validating the calculations against GMAT. 
+ eme2k.mu_km3_s2 = Some(398_600.4415); let epoch = Epoch::from_mjd_tai(21_545.0); - let cart = Orbit::cartesian( + let cart = Orbit::new( -7273.338970882, 253.990592670, 0.022164861, @@ -324,26 +365,24 @@ fn state_def_equatorial() { eme2k, ); - f64_eq!(cart.sma_km(), 7278.136188379306, "sma"); - f64_eq!(cart.ecc(), 4.99846643158263e-05, "ecc"); - f64_eq!(cart.inc_deg(), 0.005000000478594339, "inc"); - f64_eq!(cart.raan_deg(), 360.0, "raan"); - f64_eq!(cart.aop_deg(), 177.9999736473912, "aop"); - f64_eq!(cart.ta_deg(), 2.650826247094554e-05, "ta"); + f64_eq!(cart.sma_km().unwrap(), 7278.136188379306, "sma"); + f64_eq!(cart.ecc().unwrap(), 4.99846643158263e-05, "ecc"); + f64_eq!(cart.inc_deg().unwrap(), 0.005000000478594339, "inc"); + f64_eq!(cart.raan_deg().unwrap(), 360.0, "raan"); + f64_eq!(cart.aop_deg().unwrap(), 177.9999736473912, "aop"); + f64_eq!(cart.ta_deg().unwrap(), 2.650826247094554e-05, "ta"); } -#[test] -fn state_def_reciprocity() { - let path = "./data/de438s.anise"; - let buf = file_mmap!(path).unwrap(); - let ctx = DataSet::try_from_bytes(&buf).unwrap(); - - let eme2k = ctx.celestial_frame("Earth", "J2000").unwrap(); +#[rstest] +fn val_state_def_reciprocity(almanac: Almanac) { + let mut eme2k = almanac.frame_from_uid(EARTH_J2000).unwrap(); + // Set the GM value from the GMAT data since we're validating the calculations against GMAT. + eme2k.mu_km3_s2 = Some(398_600.4415); let epoch = Epoch::from_mjd_tai(21_545.0); assert_eq!( - Orbit::cartesian( + Orbit::new( -38_892.724_449_149_02, 16_830.384_772_891_86, 0.722_659_929_135_562_2, @@ -367,7 +406,7 @@ fn state_def_reciprocity() { ); assert_eq!( - Orbit::cartesian( + Orbit::new( 5_946.673_548_288_958, 1_656.154_606_023_661, 2_259.012_129_598_249, @@ -391,7 +430,7 @@ fn state_def_reciprocity() { ); assert_eq!( - Orbit::cartesian(-2436.45, -2436.45, 6891.037, 5.088_611, -5.088_611, 0.0, epoch, eme2k), + Orbit::new(-2436.45, -2436.45, 6891.037, 5.088_611, -5.088_611, 0.0, epoch, eme2k), Orbit::keplerian( 7_712.186_117_895_043, 0.000_999_582_831_432_052_5, @@ -406,13 +445,9 @@ fn state_def_reciprocity() { ); } -#[test] -fn geodetic_vallado() { - let path = "./data/de438s.anise"; - let buf = file_mmap!(path).unwrap(); - let ctx = DataSet::try_from_bytes(&buf).unwrap(); - - let eme2k = ctx.geodetic_frame("Earth", "J2000").unwrap(); +#[rstest] +fn verif_geodetic_vallado(almanac: Almanac) { + let eme2k = almanac.frame_from_uid(EARTH_J2000).unwrap(); let epoch = Epoch::from_mjd_tai(51_545.0); // Test case from Vallado, 4th Ed., page 173, Example 3-3 @@ -421,22 +456,23 @@ fn geodetic_vallado() { let rj = 6862.875; let rj_val = 6_862.874_999_999_999; let rk = 6448.296; - let lat = 34.352_495_139_917_26; // Valldo: 34.352496 + let lat = 34.352_519_916_935_62; // Valldo: 34.352496 let long = 46.446_416_856_789_96; // Vallado 46.4464 - let height = 5_085.219_430_345_17; // Vallado: 5085.22 - let r = GeodeticOrbit::from_position(ri, rj, rk, epoch, eme2k); - f64_eq!(r.geodetic_latitude(), lat, "latitude (φ)"); + let height = 5_085.217_419_357_936; // Vallado: 5085.22 + let r = Orbit::from_position(ri, rj, rk, epoch, eme2k); + f64_eq!(r.geodetic_latitude().unwrap(), lat, "latitude (φ)"); f64_eq!(r.geodetic_longitude(), long, "longitude (λ)"); - f64_eq!(r.geodetic_height(), height, "height"); + f64_eq!(r.geodetic_height().unwrap(), height, "height"); let mean_earth_angular_velocity_deg_s = 0.004178079012116429; - let r = GeodeticOrbit::from_altlatlong( + let r = Orbit::from_altlatlong( lat, long, height, mean_earth_angular_velocity_deg_s, 
epoch, eme2k, - ); + ) + .unwrap(); f64_eq!(r.radius_km.x, ri_val, "r_i"); f64_eq!(r.radius_km.y, rj_val, "r_j"); f64_eq!(r.radius_km.z, rk, "r_k"); @@ -447,97 +483,126 @@ fn geodetic_vallado() { let long = 345.5975; let height = 56.0e-3; let height_val = 0.056_000_000_000_494_765; - let ri = 6_119.399_587_411_616; - let rj = -1_571.479_380_333_195; - let rk = -871.561_161_926_003_9; - let r = GeodeticOrbit::from_altlatlong( + let ri = 6_119.4032_332_711_09; + let rj = -1_571.480_316_600_378_3; + let rk = -871.560_226_712_024_7; + let r = Orbit::from_altlatlong( lat, long, height, mean_earth_angular_velocity_deg_s, epoch, eme2k, - ); + ) + .unwrap(); f64_eq!(r.radius_km.x, ri, "r_i"); f64_eq!(r.radius_km.y, rj, "r_j"); f64_eq!(r.radius_km.z, rk, "r_k"); - let r = GeodeticOrbit::from_position(ri, rj, rk, epoch, eme2k); - f64_eq!(r.geodetic_latitude(), lat_val, "latitude (φ)"); + let r = Orbit::from_position(ri, rj, rk, epoch, eme2k); + f64_eq!(r.geodetic_latitude().unwrap(), lat_val, "latitude (φ)"); f64_eq!(r.geodetic_longitude(), long, "longitude (λ)"); - f64_eq!(r.geodetic_height(), height_val, "height"); -} + f64_eq!(r.geodetic_height().unwrap(), height_val, "height"); -#[test] -fn with_init() { - let path = "./data/de438s.anise"; - let buf = file_mmap!(path).unwrap(); - let ctx = DataSet::try_from_bytes(&buf).unwrap(); + // Check reciprocity near poles + let r = Orbit::from_altlatlong( + 0.1, + long, + height_val, + mean_earth_angular_velocity_deg_s, + epoch, + eme2k, + ) + .unwrap(); + f64_eq!(r.geodetic_latitude().unwrap(), 0.1, "latitude (φ)"); +} - let eme2k = ctx.celestial_frame("Earth", "J2000").unwrap(); +#[rstest] +fn verif_with_init(almanac: Almanac) { + let eme2k = almanac.frame_from_uid(EARTH_J2000).unwrap(); let epoch = Epoch::from_gregorian_tai_at_midnight(2021, 3, 4); let kep = Orbit::keplerian( 8_191.93, 0.024_5, 12.85, 306.614, 314.19, 99.887_7, epoch, eme2k, ); for sma_incr in 100..1000 { - let new_sma = kep.sma_km() + f64::from(sma_incr); - f64_eq!(kep.with_sma(new_sma).sma_km(), new_sma, "wrong sma"); + let new_sma = kep.sma_km().unwrap() + f64::from(sma_incr); + f64_eq!( + kep.with_sma(new_sma).expect("with_*").sma_km().unwrap(), + new_sma, + "wrong sma" + ); } for ecc_incr in 0..100 { - let new_ecc = kep.ecc() + f64::from(ecc_incr) / 100.0; - let new_state = kep.with_ecc(new_ecc); + let new_ecc = kep.ecc().unwrap() + f64::from(ecc_incr) / 100.0; + let new_state = kep.with_ecc(new_ecc).expect("with_*"); f64_eq!( - new_state.ecc(), + new_state.ecc().unwrap(), new_ecc, - format!("wrong ecc: got {}\twanted {}", new_state.inc_deg(), new_ecc) + format!( + "wrong ecc: got {}\twanted {}", + new_state.inc_deg().unwrap(), + new_ecc + ) ); } for angle_incr in 0..360 { - let new_aop = between_0_360(kep.aop_deg() + f64::from(angle_incr)); - let new_state = kep.add_aop_deg(f64::from(angle_incr)); + let new_aop = between_0_360(kep.aop_deg().unwrap() + f64::from(angle_incr)); + let new_state = kep.add_aop_deg(f64::from(angle_incr)).unwrap(); f64_eq!( - new_state.aop_deg(), + new_state.aop_deg().unwrap(), new_aop, - format!("wrong aop: got {}\twanted {}", new_state.aop_deg(), new_aop) + format!( + "wrong aop: got {}\twanted {}", + new_state.aop_deg().unwrap(), + new_aop + ) ); } for angle_incr in 0..360 { - let new_raan = between_0_360(kep.raan_deg() + f64::from(angle_incr)); - let new_state = kep.add_raan_deg(f64::from(angle_incr)); + let new_raan = between_0_360(kep.raan_deg().unwrap() + f64::from(angle_incr)); + let new_state = kep.add_raan_deg(f64::from(angle_incr)).unwrap(); 
f64_eq!( - new_state.raan_deg(), + new_state.raan_deg().unwrap(), new_raan, format!( "wrong raan: got {}\twanted {}", - new_state.raan_deg(), + new_state.raan_deg().unwrap(), new_raan ) ); } for angle_incr in 0..360 { - let new_ta = between_0_360(kep.ta_deg() + f64::from(angle_incr)); - let new_state = kep.with_ta_deg(new_ta); + let new_ta = between_0_360(kep.ta_deg().unwrap() + f64::from(angle_incr)); + let new_state = kep.with_ta_deg(new_ta).unwrap(); f64_eq!( - new_state.ta_deg(), + new_state.ta_deg().unwrap(), new_ta, - format!("wrong ta: got {}\twanted {}", new_state.aop_deg(), new_ta) + format!( + "wrong ta: got {}\twanted {}", + new_state.aop_deg().unwrap(), + new_ta + ) ); } for angle_incr in 0..360 { // NOTE: Inclination is bounded between 0 and 180, hence the slightly different logic here. - let new_inc = between_pm_180(kep.inc_deg() + f64::from(angle_incr)).abs(); - let new_state = kep.add_inc_deg(f64::from(angle_incr)); + let new_inc = between_pm_180(kep.inc_deg().unwrap() + f64::from(angle_incr)).abs(); + let new_state = kep.add_inc_deg(f64::from(angle_incr)).unwrap(); f64_eq!( - new_state.inc_deg(), + new_state.inc_deg().unwrap(), new_inc, - format!("wrong inc: got {}\twanted {}", new_state.inc_deg(), new_inc) + format!( + "wrong inc: got {}\twanted {}", + new_state.inc_deg().unwrap(), + new_inc + ) ); } for apsis_delta in 100..1000 { - let new_ra = kep.apoapsis_km() + f64::from(apsis_delta); - let new_rp = kep.periapsis_km() - f64::from(apsis_delta); + let new_ra = kep.apoapsis_km().unwrap() + f64::from(apsis_delta); + let new_rp = kep.periapsis_km().unwrap() - f64::from(apsis_delta); let new_orbit = kep.with_apoapsis_periapsis_km(new_ra, new_rp).unwrap(); - f64_eq!(new_orbit.apoapsis_km(), new_ra, "wrong ra"); - f64_eq!(new_orbit.periapsis_km(), new_rp, "wrong rp"); + f64_eq!(new_orbit.apoapsis_km().unwrap(), new_ra, "wrong ra"); + f64_eq!(new_orbit.periapsis_km().unwrap(), new_rp, "wrong rp"); } } diff --git a/tests/context/mod.rs b/tests/context/mod.rs index d9c62604..abc95859 100644 --- a/tests/context/mod.rs +++ b/tests/context/mod.rs @@ -1,11 +1,13 @@ #[test] fn test_load_ctx() { - // Start bycreating the ANISE planetary data + // Start by creating the ANISE planetary data use anise::{ naif::kpl::parser::convert_tpc, prelude::{Almanac, BPC, SPK}, }; + dbg!(core::mem::size_of::()); + let dataset = convert_tpc("data/pck00008.tpc", "data/gm_de431.tpc").unwrap(); // Load BSP and BPC @@ -14,9 +16,11 @@ fn test_load_ctx() { let spk = SPK::load("data/de440.bsp").unwrap(); let bpc = BPC::load("data/earth_latest_high_prec.bpc").unwrap(); - let mut loaded_ctx = ctx.load_spk(&spk).unwrap().load_bpc(&bpc).unwrap(); + let mut loaded_ctx = ctx.load_spk(spk).unwrap().load_bpc(bpc).unwrap(); loaded_ctx.planetary_data = dataset; println!("{loaded_ctx}"); + + dbg!(core::mem::size_of::()); } diff --git a/tests/ephemerides/mod.rs b/tests/ephemerides/mod.rs index a9a90865..264cdfbb 100644 --- a/tests/ephemerides/mod.rs +++ b/tests/ephemerides/mod.rs @@ -11,5 +11,5 @@ mod parent_translation_verif; mod paths; mod translation; -#[cfg(feature = "validation")] +#[cfg(feature = "spkezr_validation")] mod validation; diff --git a/tests/ephemerides/parent_translation_verif.rs b/tests/ephemerides/parent_translation_verif.rs index e47b70d1..f74579ef 100644 --- a/tests/ephemerides/parent_translation_verif.rs +++ b/tests/ephemerides/parent_translation_verif.rs @@ -15,7 +15,7 @@ use anise::file2heap; use anise::math::Vector3; use anise::prelude::*; -const ZEROS: &'static [u8] = &[0; 2048]; +const ZEROS: 
&[u8] = &[0; 2048]; /// Test that we can load data from a static pointer to it. #[test] fn invalid_load_from_static() { @@ -30,7 +30,7 @@ fn de438s_parent_translation_verif() { let bytes = file2heap!("data/de440s.bsp").unwrap(); let de438s = SPK::parse(bytes).unwrap(); - let ctx = Almanac::from_spk(&de438s).unwrap(); + let ctx = Almanac::from_spk(de438s).unwrap(); let epoch = Epoch::from_gregorian_utc_at_midnight(2002, 2, 7); @@ -46,58 +46,41 @@ fn de438s_parent_translation_verif() { ['9.5205530594596043e+07', '-4.6160758818180226e+07', '-2.6779476581501361e+07', '1.6612048969243794e+01', '2.8272067093941200e+01', '1.1668575714409423e+01'] */ - let (pos, vel, acc, _) = ctx - .translate_to_parent( - VENUS_J2000, - epoch, - Aberration::None, - LengthUnit::Kilometer, - TimeUnit::Second, - ) + let state = ctx + .translate_to_parent(VENUS_J2000, epoch, Aberration::None) .unwrap(); + let pos_km = state.radius_km; + let vel_km_s = state.velocity_km_s; + let pos_expct_km = Vector3::new( - 9.5205530594596043e+07, - -4.6160758818180226e+07, - -2.6779476581501361e+07, + 9.520_553_059_459_604e7, + -4.616_075_881_818_022_6e7, + -2.677_947_658_150_136e7, ); let vel_expct_km_s = Vector3::new( - 1.6612048969243794e+01, - 2.8272067093941200e+01, - 1.1668575714409423e+01, + 1.661_204_896_924_379_4e1, + 2.827_206_709_394_12e1, + 1.166_857_571_440_942_3e1, ); // We expect exactly the same output as SPICE to machine precision. - assert!((pos - pos_expct_km).norm() < EPSILON); - assert!((vel - vel_expct_km_s).norm() < EPSILON); - assert!(acc.norm() < EPSILON); - - // Same thing but in Megameters per millisecond - let (pos, vel, acc, _) = ctx - .translate_to_parent( - VENUS_J2000, - epoch, - Aberration::None, - LengthUnit::Megameter, - TimeUnit::Millisecond, - ) - .unwrap(); + assert!((pos_km - pos_expct_km).norm() < EPSILON); + assert!((vel_km_s - vel_expct_km_s).norm() < EPSILON); // We expect exactly the same output as SPICE to machine precision. assert!( - (pos - pos_expct_km * 1e-3).norm() < EPSILON, + (pos_km - pos_expct_km).norm() < EPSILON, "got {} but want {}", - pos, - pos_expct_km * 1e-3 + pos_km, + pos_expct_km ); - // NOTE: km/s and Mm/ms correspond to the same number: times 1e3 for km -> Mm and times 1e-3 for s -> ms. 
assert!( - (vel - vel_expct_km_s).norm() < EPSILON, + (vel_km_s - vel_expct_km_s).norm() < EPSILON, "got {} but want {}", - vel, + vel_km_s, vel_expct_km_s ); - assert!(acc.norm() < EPSILON); } diff --git a/tests/ephemerides/paths.rs b/tests/ephemerides/paths.rs index a87fbb6f..4b852e59 100644 --- a/tests/ephemerides/paths.rs +++ b/tests/ephemerides/paths.rs @@ -30,7 +30,7 @@ fn common_root_verif() { for path in ["./data/de430.bsp", "./data/de440s.bsp", "./data/de440.bsp"] { let buf = file2heap!(path).unwrap(); let spk = SPK::parse(buf).unwrap(); - let ctx = Almanac::from_spk(&spk).unwrap(); + let ctx = Almanac::from_spk(spk).unwrap(); // The root of all these files should be the SSB assert_eq!( diff --git a/tests/ephemerides/translation.rs b/tests/ephemerides/translation.rs index 7dc58d52..2acb374a 100644 --- a/tests/ephemerides/translation.rs +++ b/tests/ephemerides/translation.rs @@ -30,7 +30,7 @@ fn de440s_translation_verif_venus2emb() { let path = "./data/de440s.bsp"; let buf = file2heap!(path).unwrap(); let spk = SPK::parse(buf).unwrap(); - let ctx = Almanac::from_spk(&spk).unwrap(); + let ctx = Almanac::from_spk(spk).unwrap(); let epoch = Epoch::from_gregorian_utc_at_midnight(2002, 2, 7); @@ -49,31 +49,25 @@ fn de440s_translation_verif_venus2emb() { '2.0519128282958704e+01'] */ - dbg!(ctx - .common_ephemeris_path(VENUS_J2000, EARTH_MOON_BARYCENTER_J2000, epoch) - .unwrap()); - let state = ctx .translate_from_to( VENUS_J2000, EARTH_MOON_BARYCENTER_J2000, epoch, Aberration::None, - LengthUnit::Kilometer, - TimeUnit::Second, ) .unwrap(); let pos_expct_km = Vector3::new( - 2.0504464297378346e+08, - -1.3595802364930704e+08, - -6.5722791478621781e+07, + 2.050_446_429_737_834_6e8, + -1.359_580_236_493_070_4e8, + -6.572_279_147_862_178e7, ); let vel_expct_km_s = Vector3::new( - 3.7012086125533884e+01, - 4.8685441394651654e+01, - 2.0519128282958704e+01, + 3.701_208_612_553_388_4e1, + 4.868_544_139_465_165_4e1, + 2.051_912_828_295_870_4e1, ); // We expect exactly the same output as SPICE to machine precision. @@ -93,7 +87,7 @@ fn de440s_translation_verif_venus2emb() { // Test the opposite translation let state = ctx - .translate_from_to_km_s_geometric(EARTH_MOON_BARYCENTER_J2000, VENUS_J2000, epoch) + .translate_from_to_geometric(EARTH_MOON_BARYCENTER_J2000, VENUS_J2000, epoch) .unwrap(); // We expect exactly the same output as SPICE to machine precision. @@ -122,7 +116,7 @@ fn de438s_translation_verif_venus2luna() { let path = "./data/de440s.bsp"; let buf = file2heap!(path).unwrap(); let spk = SPK::parse(buf).unwrap(); - let ctx = Almanac::from_spk(&spk).unwrap(); + let ctx = Almanac::from_spk(spk).unwrap(); let epoch = Epoch::from_gregorian_utc_at_midnight(2002, 2, 7); @@ -145,26 +139,19 @@ fn de438s_translation_verif_venus2luna() { */ let state = ctx - .translate_from_to( - VENUS_J2000, - LUNA_J2000, - epoch, - Aberration::None, - LengthUnit::Kilometer, - TimeUnit::Second, - ) + .translate_from_to(VENUS_J2000, LUNA_J2000, epoch, Aberration::None) .unwrap(); let pos_expct_km = Vector3::new( - 2.0512621956428146e+08, - -1.3561254796010864e+08, - -6.5578399619259715e+07, + 2.051_262_195_642_814_6e8, + -1.356_125_479_601_086_4e8, + -6.557_839_961_925_971_5e7, ); let vel_expct_km_s = Vector3::new( - 3.6051374280511325e+01, - 4.8889024619544145e+01, - 2.0702933797799531e+01, + 3.605_137_428_051_132_5e1, + 4.888_902_461_954_414_5e1, + 2.070_293_379_779_953e1, ); // We expect exactly the same output as SPICE to machine precision. 
@@ -188,7 +175,7 @@ fn de438s_translation_verif_venus2luna() { // Test the opposite translation let state = ctx - .translate_from_to_km_s_geometric(LUNA_J2000, VENUS_J2000, epoch) + .translate_from_to_geometric(LUNA_J2000, VENUS_J2000, epoch) .unwrap(); // We expect exactly the same output as SPICE to machine precision. @@ -221,7 +208,7 @@ fn de438s_translation_verif_emb2luna() { let path = "./data/de440s.bsp"; let buf = file2heap!(path).unwrap(); let spk = SPK::parse(buf).unwrap(); - let ctx = Almanac::from_spk(&spk).unwrap(); + let ctx = Almanac::from_spk(spk).unwrap(); let epoch = Epoch::from_gregorian_utc_at_midnight(2002, 2, 7); @@ -248,8 +235,6 @@ fn de438s_translation_verif_emb2luna() { LUNA_J2000, epoch, Aberration::None, - LengthUnit::Kilometer, - TimeUnit::Second, ) .unwrap(); @@ -257,15 +242,15 @@ fn de438s_translation_verif_emb2luna() { assert_eq!(state.frame, LUNA_J2000); let pos_expct_km = Vector3::new( - 8.1576590498004080e+04, - 3.4547568919842143e+05, - 1.4439185936206434e+05, + 8.157_659_049_800_408e4, + 3.454_756_891_984_214_3e5, + 1.443_918_593_620_643_4e5, ); let vel_expct_km_s = Vector3::new( - -9.6071184502255447e-01, - 2.0358322489248903e-01, - 1.8380551484083130e-01, + -9.607_118_450_225_545e-1, + 2.035_832_248_924_890_3e-1, + 1.838_055_148_408_313e-1, ); // We expect exactly the same output as SPICE to machine precision. @@ -294,8 +279,6 @@ fn de438s_translation_verif_emb2luna() { EARTH_MOON_BARYCENTER_J2000, epoch, Aberration::None, - LengthUnit::Kilometer, - TimeUnit::Second, ) .unwrap(); @@ -333,9 +316,9 @@ fn spk_hermite_type13_verif() { let buf = file2heap!("data/gmat-hermite.bsp").unwrap(); let spacecraft = SPK::parse(buf).unwrap(); - let ctx = Almanac::from_spk(&spk) + let ctx = Almanac::from_spk(spk) .unwrap() - .load_spk(&spacecraft) + .load_spk(spacecraft) .unwrap(); let epoch = Epoch::from_gregorian_hms(2000, 1, 1, 14, 0, 0, TimeScale::UTC); @@ -343,7 +326,7 @@ fn spk_hermite_type13_verif() { let my_sc_j2k = Frame::from_ephem_j2000(-10000001); let state = ctx - .translate_from_to_km_s_geometric(my_sc_j2k, EARTH_J2000, epoch) + .translate_from_to_geometric(my_sc_j2k, EARTH_J2000, epoch) .unwrap(); println!("{state:?}"); @@ -351,15 +334,15 @@ fn spk_hermite_type13_verif() { assert_eq!(state.frame, EARTH_J2000); let pos_expct_km = Vector3::new( - 2.5920090775006811e+03, - 6.7469273862520186e+03, - 1.3832553421282723e+03, + 2.592_009_077_500_681e3, + 6.746_927_386_252_019e3, + 1.383_255_342_128_272_3e3, ); let vel_expct_km_s = Vector3::new( - -6.6688457210358747e+00, - 2.7743470870318045e+00, - -8.5832497027451471e-01, + -6.668_845_721_035_875, + 2.774_347_087_031_804_5, + -8.583_249_702_745_147e-1, ); assert!( @@ -389,7 +372,7 @@ fn multithread_query() { let path = "./data/de440s.bsp"; let buf = file2heap!(path).unwrap(); let spk = SPK::parse(buf).unwrap(); - let ctx = Almanac::from_spk(&spk).unwrap(); + let ctx = Almanac::from_spk(spk).unwrap(); let start_epoch = Epoch::from_str("2000-01-01T00:00:00 ET").unwrap(); @@ -402,7 +385,7 @@ fn multithread_query() { let epochs: Vec = time_it.collect(); epochs.into_par_iter().for_each(|epoch| { let state = ctx - .translate_from_to_km_s_geometric(LUNA_J2000, EARTH_MOON_BARYCENTER_J2000, epoch) + .translate_from_to_geometric(LUNA_J2000, EARTH_MOON_BARYCENTER_J2000, epoch) .unwrap(); println!("{state:?}"); }); @@ -410,3 +393,64 @@ fn multithread_query() { let delta_t = Epoch::now().unwrap() - start; println!("Took {delta_t}"); } + +#[test] +fn hermite_query() { + use anise::naif::kpl::parser::convert_tpc; + + let 
traj = SPK::load("./data/gmat-hermite.bsp").unwrap(); + let summary = traj.data_summaries().unwrap()[0]; + println!("{}", summary); + + let mut ctx = Almanac::from_spk(traj).unwrap(); + // Also load the planetary data + ctx.planetary_data = convert_tpc("data/pck00008.tpc", "data/gm_de431.tpc").unwrap(); + + let summary_from_ctx = ctx.spk_summary_from_name("SPK_SEGMENT").unwrap().0; + + // The UIDs of the frames match. + assert_eq!( + summary.center_frame_uid(), + summary_from_ctx.center_frame_uid() + ); + + // And the summaries match + assert_eq!(&summary, summary_from_ctx); + + let summary_duration = summary.end_epoch() - summary.start_epoch(); + + // Query the midpoint of the segment, relative to its parent frame, since we don't have anything else loaded. + let state = ctx + .translate_from_to( + summary.target_frame(), + summary.center_frame(), + summary.start_epoch() + summary_duration * 0.5, + Aberration::None, + ) + .unwrap(); + + // This tests that we've loaded the frame info from the Almanac, otherwise we cannot compute the orbital elements. + assert_eq!(format!("{state:x}"), "[Earth J2000] 2000-01-01T13:39:27.999998123 UTC\tsma = 7192.041350 km\tecc = 0.024628\tinc = 12.851841 deg\traan = 306.170038 deg\taop = 315.085528 deg\tta = 96.135384 deg"); + + // Fetch the state at the start of this spline to make sure we don't glitch. + assert!(ctx + .translate_from_to( + summary.target_frame(), + summary.center_frame(), + summary.start_epoch(), + Aberration::None, + ) + .is_ok()); + + // The very last state may fail because of a rounding difference in hifitime when going to/from TDB. + // For example, in this exact case, the end_epoch is shown to be 12032.183931521 seconds but the epoch + // data in the BSP is 12032.1839315118(27), or 30 ns. A fix is in progress in hifitime v4. 
+ // assert!(ctx + // .translate_from_to( + // summary.target_frame(), + // to_frame, + // summary.end_epoch(), + // Aberration::None, + // ) + // .is_ok()); +} diff --git a/tests/ephemerides/validation/compare.rs b/tests/ephemerides/validation/compare.rs index 00a97f9a..baea02d9 100644 --- a/tests/ephemerides/validation/compare.rs +++ b/tests/ephemerides/validation/compare.rs @@ -18,7 +18,7 @@ use log::{error, info}; use parquet::{arrow::ArrowWriter, file::properties::WriterProperties}; use std::{collections::HashMap, fs::File, sync::Arc}; -const COMPONENT: &[&'static str] = &["X", "Y", "Z", "VX", "VY", "VZ"]; +const COMPONENT: &[&str] = &["X", "Y", "Z", "VX", "VY", "VZ"]; // Number of items to keep in memory before flushing to the parquet file const BATCH_SIZE: usize = 10_000; @@ -108,7 +108,7 @@ impl CompareEphem { let props = WriterProperties::builder().build(); let writer = ArrowWriter::try_new(file, Arc::new(schema), Some(props)).unwrap(); - let me = Self { + Self { output_file_name, input_file_names, num_queries_per_pair, @@ -121,9 +121,7 @@ impl CompareEphem { batch_spice_val: Vec::new(), batch_anise_val: Vec::new(), batch_abs_diff: Vec::new(), - }; - - me + } } /// Executes this ephemeris validation and return the number of querying errors @@ -210,7 +208,7 @@ impl CompareEphem { } } - for spk in &spks { + for spk in spks { ctx = ctx.load_spk(spk).unwrap(); } @@ -223,8 +221,7 @@ impl CompareEphem { / (self.num_queries_per_pair as f64)) .seconds(); - let mut time_it = - TimeSeries::exclusive(*start_epoch, *end_epoch - time_step, time_step); + let time_it = TimeSeries::exclusive(*start_epoch, *end_epoch - time_step, time_step); info!("{time_it} for {from_frame} -> {to_frame} "); @@ -232,22 +229,21 @@ impl CompareEphem { continue; } - while let Some(epoch) = time_it.next() { - let data = match ctx.translate_from_to_km_s_geometric(*from_frame, *to_frame, epoch) - { + for epoch in time_it { + let data = match ctx.translate_from_to_geometric(*from_frame, *to_frame, epoch) { Ok(state) => { // Find the SPICE names let targ = - match SPKSummaryRecord::human_name_to_id(&format!("{from_frame:e}")) { + match SPKSummaryRecord::spice_name_to_id(&format!("{from_frame:e}")) { Ok(id) => { - SPKSummaryRecord::id_to_human_name(id).unwrap().to_string() + SPKSummaryRecord::id_to_spice_name(id).unwrap().to_string() } Err(_) => format!("{from_frame:e}"), }; - let obs = match SPKSummaryRecord::human_name_to_id(&format!("{to_frame:e}")) + let obs = match SPKSummaryRecord::spice_name_to_id(&format!("{to_frame:e}")) { - Ok(id) => SPKSummaryRecord::id_to_human_name(id).unwrap().to_string(), + Ok(id) => SPKSummaryRecord::id_to_spice_name(id).unwrap().to_string(), Err(_) => format!("{to_frame:e}"), }; @@ -255,7 +251,7 @@ impl CompareEphem { let (spice_state, _) = spice::spkezr(&targ, epoch.to_et_seconds(), "J2000", "NONE", &obs); - let data = EphemValData { + EphemValData { src_frame: format!("{from_frame:e}"), dst_frame: format!("{to_frame:e}"), epoch_et_s: epoch.to_et_seconds(), @@ -271,9 +267,7 @@ impl CompareEphem { anise_val_vx_km_s: state.velocity_km_s.x, anise_val_vy_km_s: state.velocity_km_s.y, anise_val_vz_km_s: state.velocity_km_s.z, - }; - - data + } } Err(e) => { diff --git a/tests/naif.rs b/tests/naif.rs index a3613939..de36774b 100644 --- a/tests/naif.rs +++ b/tests/naif.rs @@ -36,9 +36,15 @@ fn test_binary_pck_load() { assert_eq!(high_prec.crc32(), 0x97bca34c); assert!(high_prec.scrub().is_ok()); - for n in 0..high_prec.daf_summary().unwrap().num_summaries() { - let (name, data) = 
high_prec.nth_summary(n).unwrap(); - println!("{} -> {:?}", name, data); + let name_rcrd = high_prec.name_record().unwrap(); + let summary_size = high_prec.file_record().unwrap().summary_size(); + for idx in 0..name_rcrd.num_entries(summary_size) { + let summary = &high_prec.data_summaries().unwrap()[idx]; + if summary.is_empty() { + break; + } + let name = name_rcrd.nth_name(idx, summary_size); + println!("{} -> {:?}", name, summary); } } @@ -56,13 +62,22 @@ fn test_spk_load_bytes() { assert_eq!(de421.crc32(), 0x5c78bc13); assert!(de421.scrub().is_ok()); - assert_eq!(de421.file_record.nd, 2); - assert_eq!(de421.file_record.ni, 6); - assert_eq!(de421.file_record.identification().unwrap(), "SPK"); - assert_eq!(de421.file_record.internal_filename().unwrap(), "NIO2SPK"); - assert_eq!(de421.file_record.forward, 4); - assert_eq!(de421.file_record.backward, 4); - assert_eq!(de421.file_record.endianness().unwrap(), Endian::Little); + assert_eq!(de421.file_record().unwrap().nd, 2); + assert_eq!(de421.file_record().unwrap().ni, 6); + assert_eq!( + de421.file_record().unwrap().identification().unwrap(), + "SPK" + ); + assert_eq!( + de421.file_record().unwrap().internal_filename().unwrap(), + "NIO2SPK" + ); + assert_eq!(de421.file_record().unwrap().forward, 4); + assert_eq!(de421.file_record().unwrap().backward, 4); + assert_eq!( + de421.file_record().unwrap().endianness().unwrap(), + Endian::Little + ); assert_eq!(de421.daf_summary().unwrap().num_summaries(), 15); assert_eq!(de421.daf_summary().unwrap().next_record(), 0); assert_eq!(de421.daf_summary().unwrap().prev_record(), 0); @@ -78,12 +93,21 @@ fn test_spk_load_bytes() { 7040, 3520, 3520, 1760, 1760, 1760, 1760, 1760, 1760, 3520, 14080, 14080, 1, 1, 1, ]; - for n in 0..de421.daf_summary().unwrap().num_summaries() { - let (name, summary) = de421.nth_summary(n).unwrap(); + let name_rcrd = de421.name_record().unwrap(); + let summary_size = de421.file_record().unwrap().summary_size(); + + for (n, segment) in seg_len + .iter() + .enumerate() + .take(de421.daf_summary().unwrap().num_summaries()) + { + let name = name_rcrd.nth_name(n, summary_size); + let summary = &de421.data_summaries().unwrap()[n]; + println!("{} -> {}", name, summary); // We know that the DE421 data is all in Type 2 let data_set = de421.nth_data::<Type2ChebyshevSet>(n).unwrap(); - assert_eq!(data_set.num_records, seg_len[n]); + assert_eq!(data_set.num_records, *segment); if summary.target_id == 301 { assert_eq!( summary.start_idx, 944041, @@ -112,23 +136,27 @@ fn test_spk_load_bytes() { println!("{data_set}"); // Put this in a context - let spice = Almanac::default(); - let spice = spice.load_spk(&de421).unwrap(); + let default_almanac = Almanac::default(); + let spice = default_almanac.load_spk(de421).unwrap(); + assert_eq!(spice.num_loaded_spk(), 1); + assert_eq!(default_almanac.num_loaded_spk(), 0); // Now load another DE file // NOTE: Rust has strict lifetime requirements, and the Spice Context is set up such that loading another dataset will return a new context with that data set loaded in it. { let bytes = file2heap!("data/de440.bsp").unwrap(); let de440 = DAF::<SPKSummaryRecord>::parse(bytes).unwrap(); - let spice = spice.load_spk(&de440).unwrap(); + let spice = spice.load_spk(de440).unwrap(); // And another let bytes = file2heap!("data/de440s.bsp").unwrap(); let de440 = DAF::<SPKSummaryRecord>::parse(bytes).unwrap(); - let spice = spice.load_spk(&de440).unwrap(); + let spice = spice.load_spk(de440).unwrap(); // NOTE: Because everything is a pointer, the size on the stack remains constant at 521 bytes. 
println!("{}", size_of_val(&spice)); + assert_eq!(spice.num_loaded_spk(), 3); + assert_eq!(default_almanac.num_loaded_spk(), 0); } // NOTE: Because everything is a pointer, the size on the stack remains constant at 521 bytes. @@ -147,13 +175,16 @@ fn test_spk_rename_summary() { let example_data = SPK::load(path).unwrap(); - example_data - .name_record - .set_nth_name(0, example_data.file_record.summary_size(), "BLAH BLAH"); + example_data.name_record().unwrap().set_nth_name( + 0, + example_data.file_record().unwrap().summary_size(), + "BLAH BLAH", + ); dbg!(example_data - .name_record - .nth_name(0, example_data.file_record.summary_size())); + .name_record() + .unwrap() + .nth_name(0, example_data.file_record().unwrap().summary_size())); example_data.persist("target/rename-test.bsp").unwrap(); }