// ED_LRR/rust/src/common.rs
//! # Common utility functions
use crate::route::Router;
use bincode::Options;
use crossbeam_channel::{bounded, Receiver};
use csv::ByteRecord;
use dict_derive::IntoPyObject;
use eyre::Result;
use log::*;
use nohash_hasher::NoHashHasher;
use pyo3::prelude::*;
use pyo3::types::PyDict;
use pyo3::{conversion::ToPyObject, create_exception};
use pythonize::depythonize;
use serde::{Deserialize, Serialize};
use sha3::{Digest, Sha3_256};
use std::collections::{BTreeMap, HashMap, HashSet, VecDeque};
use std::hash::{BuildHasherDefault, Hash, Hasher};
use std::ops::{Deref, DerefMut};
use std::path::Path;
use std::str::FromStr;
use std::thread;
use std::{cmp::Ordering, cmp::Reverse, collections::BinaryHeap};
use std::{
fs::File,
io::{BufReader, BufWriter},
path::PathBuf,
};
use thiserror::Error;
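/// Search heuristic: estimated distance remaining to `goal` after one
/// maximum-range jump from `node` (`range` times the node's jump-range
/// multiplier), clamped to zero.
///
/// Illustrative usage (the values are made up for the example):
///
/// ```ignore
/// let node = TreeNode { id: 0, pos: [0.0, 0.0, 0.0], flags: 0b11 }; // neutron star, 4x range
/// let goal = TreeNode { id: 1, pos: [100.0, 0.0, 0.0], flags: 0b01 };
/// // A 20 Ly base range boosted 4x covers 80 Ly, leaving 20 Ly to the goal.
/// assert_eq!(heuristic(20.0, &node, &goal), 20.0);
/// ```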
#[inline(always)]
pub fn heuristic(range: f32, node: &TreeNode, goal: &TreeNode) -> f32 {
// distance remaining after jumping from node towards goal
let a2 = dist(&node.pos, &goal.pos);
let mult = node.get_mult();
let b2 = range * mult;
(a2 - b2).max(0.0)
}
/// Min-heap priority queue using f32 as priority
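///
/// Illustrative usage:
///
/// ```ignore
/// let mut heap = MinFHeap::new();
/// heap.push(2.0, "far");
/// heap.push(1.0, "near");
/// assert_eq!(heap.pop(), Some((1.0, "near")));
/// assert_eq!(heap.pop(), Some((2.0, "far")));
/// ```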
pub struct MinFHeap<T: Ord>(BinaryHeap<(Reverse<F32>, T)>);
/// Max-heap priority queue using f32 as priority
pub struct MaxFHeap<T: Ord>(BinaryHeap<(F32, T)>);
impl<T: Ord> MaxFHeap<T> {
/// Create new, empty priority queue
pub fn new() -> Self {
MaxFHeap(BinaryHeap::new())
}
/// push value `item` with priority `w` into queue
pub fn push(&mut self, w: f32, item: T) {
self.0.push((F32(w), item))
}
/// Remove and return largest item and priority
pub fn pop(&mut self) -> Option<(f32, T)> {
self.0.pop().map(|(F32(w), item)| (w, item))
}
}
impl<T: Ord> Default for MaxFHeap<T> {
fn default() -> Self {
MaxFHeap(BinaryHeap::new())
}
}
impl<T: Ord> Deref for MaxFHeap<T> {
type Target = BinaryHeap<(F32, T)>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<T: Ord> DerefMut for MaxFHeap<T> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl<T: Ord> MinFHeap<T> {
/// Create new, empty priority queue
pub fn new() -> Self {
MinFHeap(BinaryHeap::new())
}
/// push value `item` with priority `w` into queue
pub fn push(&mut self, w: f32, item: T) {
self.0.push((Reverse(F32(w)), item))
}
/// Remove and return smallest item and priority
pub fn pop(&mut self) -> Option<(f32, T)> {
self.0.pop().map(|(Reverse(F32(w)), item)| (w, item))
}
}
impl<T: Ord> Default for MinFHeap<T> {
fn default() -> Self {
MinFHeap(BinaryHeap::new())
}
}
impl<T: Ord> Deref for MinFHeap<T> {
type Target = BinaryHeap<(Reverse<F32>, T)>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<T: Ord> DerefMut for MinFHeap<T> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
/// ED LRR error type
#[derive(Error, Debug)]
pub enum EdLrrError {
#[error("failed to compute route from {from:?} to {to:?}: {reason}")]
RouteError {
from: Option<System>,
to: Option<System>,
reason: String,
},
#[error("failed to find system matching {0:?}")]
ResolveError(String),
#[error("runtime error: {0:?}")]
RuntimeError(String),
#[error("Failed to process {0}")]
ProcessingError(PathBuf),
#[error(transparent)]
EvalError(#[from] eval::Error),
#[error(transparent)]
CSVError(#[from] csv::Error),
#[error(transparent)]
IOError(#[from] std::io::Error),
#[error(transparent)]
BincodeError(#[from] Box<bincode::ErrorKind>),
#[error(transparent)]
PyError(#[from] pyo3::PyErr),
#[error(transparent)]
Error(#[from] eyre::Error),
#[error("unknown error")]
Unknown,
}
pub mod py_exceptions {
use super::*;
pub use pyo3::exceptions::*;
create_exception!(_ed_lrr, RouteError, PyException);
create_exception!(_ed_lrr, ResolveError, PyException);
create_exception!(_ed_lrr, EdLrrException, PyException);
create_exception!(_ed_lrr, ProcessingError, PyException);
create_exception!(_ed_lrr, FileFormatError, PyException);
}
impl FromStr for EdLrrError {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(Self::RuntimeError(s.to_owned()))
}
}
impl std::convert::From<String> for EdLrrError {
fn from(s: String) -> Self {
Self::RuntimeError(s)
}
}
impl std::convert::From<EdLrrError> for PyErr {
fn from(err: EdLrrError) -> PyErr {
match err {
EdLrrError::PyError(e) => e,
EdLrrError::BincodeError(..) => {
py_exceptions::FileFormatError::new_err(err.to_string())
}
EdLrrError::RouteError { .. } => py_exceptions::RouteError::new_err(err.to_string()),
EdLrrError::RuntimeError(msg) => py_exceptions::PyRuntimeError::new_err(msg),
EdLrrError::ResolveError(..) => py_exceptions::PyRuntimeError::new_err(err.to_string()),
EdLrrError::EvalError(err) => py_exceptions::PyRuntimeError::new_err(err.to_string()),
EdLrrError::CSVError(err) => py_exceptions::PyRuntimeError::new_err(err.to_string()),
EdLrrError::IOError(err) => py_exceptions::PyIOError::new_err(err.to_string()),
EdLrrError::Error(err) => py_exceptions::EdLrrException::new_err(err.to_string()),
EdLrrError::ProcessingError(buf) => {
py_exceptions::ProcessingError::new_err(format!("{}", buf.display()))
}
EdLrrError::Unknown => {
py_exceptions::EdLrrException::new_err("Unknown error!".to_string())
}
}
}
}
pub type EdLrrResult<T> = Result<T, EdLrrError>;
/// Total-order comparison for `f32`: `NaN` compares equal to `NaN` and greater than any other value
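///
/// Illustrative usage:
///
/// ```ignore
/// use std::cmp::Ordering;
/// assert_eq!(fcmp(1.0, 2.0), Ordering::Less);
/// assert_eq!(fcmp(f32::NAN, 1.0), Ordering::Greater);
/// assert_eq!(fcmp(f32::NAN, f32::NAN), Ordering::Equal);
/// ```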
pub fn fcmp(a: f32, b: f32) -> Ordering {
match (a, b) {
(x, y) if x.is_nan() && y.is_nan() => Ordering::Equal,
(x, _) if x.is_nan() => Ordering::Greater,
(_, y) if y.is_nan() => Ordering::Less,
(..) => a.partial_cmp(&b).unwrap(),
}
}
/// `f32` wrapper type implementing `Eq` and `Ord`
#[derive(Debug)]
pub struct F32(pub f32);
impl PartialEq for F32 {
fn eq(&self, other: &F32) -> bool {
fcmp(self.0, other.0) == std::cmp::Ordering::Equal
}
}
impl Eq for F32 {}
impl PartialOrd for F32 {
fn partial_cmp(&self, other: &F32) -> Option<std::cmp::Ordering> {
Some(fcmp(self.0, other.0))
}
}
impl Ord for F32 {
fn cmp(&self, other: &F32) -> std::cmp::Ordering {
fcmp(self.0, other.0)
}
}
impl Deref for F32 {
type Target = f32;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl DerefMut for F32 {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
/// Returns additional jump range (in Ly) granted by specified class of Guardian FSD Booster
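///
/// Illustrative usage:
///
/// ```ignore
/// assert_eq!(get_fsd_booster_info(5), Ok(10.5));
/// assert!(get_fsd_booster_info(6).is_err());
/// ```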
pub fn get_fsd_booster_info(class: usize) -> Result<f32, String> {
// Data from https://elite-dangerous.fandom.com/wiki/Guardian_Frame_Shift_Drive_Booster
let ret = match class {
0 => 0.0,
1 => 4.0,
2 => 6.0,
3 => 7.75,
4 => 9.25,
5 => 10.5,
_ => return Err(format!("Invalid Guardian booster class: {}", class)),
};
Ok(ret)
}
/// Returns optimal mass and maximum fuel per jump for the given FSD rating and class as a hash map
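///
/// Illustrative usage (class 5, rating 5):
///
/// ```ignore
/// let fsd = get_fsd_info(5, 5).unwrap();
/// assert_eq!(fsd["FSDOptimalMass"], 1050.0);
/// assert_eq!(fsd["MaxFuel"], 5.0);
/// ```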
pub fn get_fsd_info(rating: usize, class: usize) -> Result<HashMap<String, f32>, String> {
let mut ret = HashMap::new();
// Data from https://elite-dangerous.fandom.com/wiki/Frame_Shift_Drive#Specifications
let (opt_mass, max_fuel) = match (class, rating) {
(2, 1) => (48.0, 0.6),
(2, 2) => (54.0, 0.6),
(2, 3) => (60.0, 0.6),
(2, 4) => (75.0, 0.8),
(2, 5) => (90.0, 0.9),
(3, 1) => (80.0, 1.2),
(3, 2) => (90.0, 1.2),
(3, 3) => (100.0, 1.2),
(3, 4) => (125.0, 1.5),
(3, 5) => (150.0, 1.8),
(4, 1) => (280.0, 2.0),
(4, 2) => (315.0, 2.0),
(4, 3) => (350.0, 2.0),
(4, 4) => (438.0, 2.5),
(4, 5) => (525.0, 3.0),
(5, 1) => (560.0, 3.3),
(5, 2) => (630.0, 3.3),
(5, 3) => (700.0, 3.3),
(5, 4) => (875.0, 4.1),
(5, 5) => (1050.0, 5.0),
(6, 1) => (960.0, 5.3),
(6, 2) => (1080.0, 5.3),
(6, 3) => (1200.0, 5.3),
(6, 4) => (1500.0, 6.6),
(6, 5) => (1800.0, 8.0),
(7, 1) => (1440.0, 8.5),
(7, 2) => (1620.0, 8.5),
(7, 3) => (1800.0, 8.5),
(7, 4) => (2250.0, 10.6),
(7, 5) => (2700.0, 12.8),
// the match is on `(class, rating)`, so bind class first and rating second
(c, r) => return Err(format!("Invalid FSD Type: Rating: {}, Class: {}", r, c)),
};
ret.insert("FSDOptimalMass".to_owned(), opt_mass);
ret.insert("MaxFuel".to_owned(), max_fuel);
Ok(ret)
}
/// Returns jump range multiplier for the specified star type (4 for neutron stars, 1.5 for white dwarfs and 1.0 otherwise)
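///
/// Illustrative usage:
///
/// ```ignore
/// assert_eq!(get_mult("Neutron Star"), 4.0);
/// assert_eq!(get_mult("White Dwarf (DA) Star"), 1.5);
/// assert_eq!(get_mult("M (Red dwarf) Star"), 1.0);
/// ```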
pub fn get_mult(star_type: &str) -> f32 {
if star_type.contains("White Dwarf") {
return 1.5;
}
if star_type.contains("Neutron") {
return 4.0;
}
1.0
}
#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum BeamWidth {
Absolute(usize),
Fraction(f32),
Radius(f32),
Infinite,
}
impl std::fmt::Display for BeamWidth {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
BeamWidth::Absolute(n) => write!(f, "{}", n),
BeamWidth::Fraction(v) => write!(f, "{}%", (*v) * 100.0),
BeamWidth::Radius(r) => write!(f, "{} Ly", r),
BeamWidth::Infinite => write!(f, "Infinite"),
}?;
Ok(())
}
}
impl Default for BeamWidth {
fn default() -> Self {
Self::Infinite
}
}
impl FromPyObject<'_> for BeamWidth {
fn extract(ob: &PyAny) -> PyResult<Self> {
depythonize(ob).map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(format!("{}", e)))
}
}
impl BeamWidth {
pub fn is_set(&self) -> bool {
match self {
Self::Fraction(f) => *f > 0.0,
Self::Absolute(n) => *n != 0,
Self::Radius(r) => *r > 0.0,
Self::Infinite => false,
}
}
pub fn is_infinite(&self) -> bool {
matches!(self, Self::Infinite)
}
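/// Resolve this beam width to the number of nodes to keep out of `nodes` candidates.
///
/// Illustrative usage:
///
/// ```ignore
/// assert_eq!(BeamWidth::Fraction(0.25).compute(1000), 250);
/// assert_eq!(BeamWidth::Absolute(32).compute(1000), 32);
/// assert_eq!(BeamWidth::Infinite.compute(1000), 1000);
/// ```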
pub fn compute(&self, nodes: usize) -> usize {
match self {
Self::Fraction(f) => {
let w = (nodes as f32) * f.max(0.0).min(1.0);
(w.ceil() as usize).max(1)
}
Self::Absolute(n) => *n,
Self::Radius(_) | Self::Infinite => nodes,
}
}
}
/// Represents an unresolved system to be searched for by name, ID or position
#[derive(Debug, FromPyObject)]
pub enum SysEntry {
ID(u32),
Name(String),
Pos((f32, f32, f32)),
}
impl ToPyObject for SysEntry {
fn to_object(&self, py: Python<'_>) -> PyObject {
match self {
Self::ID(id) => id.to_object(py),
Self::Name(name) => name.to_object(py),
Self::Pos(pos) => pos.to_object(py),
}
}
}
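/// Bin the systems in the CSV at `path` into a cubic grid with cell size `grid_size` (in Ly)
/// and return the system IDs in each cell, keyed by the cell's snapped coordinates.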
pub fn grid_stats(
path: &Path,
grid_size: f32,
) -> Result<BTreeMap<(i64, i64, i64), Vec<u32>>, String> {
let mut reader = match csv::ReaderBuilder::new().has_headers(false).from_path(path) {
Ok(rdr) => rdr,
Err(e) => {
return Err(format!("Error opening {}: {}", path.to_str().unwrap(), e));
}
};
let systems = reader.deserialize::<System>().map(Result::unwrap);
let mut ret: BTreeMap<(i64, i64, i64), Vec<u32>> = BTreeMap::new();
for sys in systems {
let k = (
((sys.pos[0] / grid_size).round() * grid_size) as i64,
((sys.pos[1] / grid_size).round() * grid_size) as i64,
((sys.pos[2] / grid_size).round() * grid_size) as i64,
);
ret.entry(k).or_default().push(sys.id);
}
Ok(ret)
}
pub enum Node {
Start,
Goal,
ID(u32),
}
pub enum Weight {
Dist(Node),
Depth,
}
impl Weight {
fn eval(&self) -> f32 {
todo!()
}
}
struct Weights(Vec<(f32, Weight)>);
impl Weights {
fn new() -> Self {
Self(vec![])
}
fn add(&mut self, w: f32, v: Weight) {
self.0.push((w, v));
}
fn eval(&mut self) -> f32 {
self.0.iter().map(|(w, v)| w * v.eval()).sum()
}
}
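/// Squared Euclidean distance between two 3D points.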
#[inline(always)]
pub fn dist2(p1: &[f32; 3], p2: &[f32; 3]) -> f32 {
let dx = p1[0] - p2[0];
let dy = p1[1] - p2[1];
let dz = p1[2] - p2[2];
dx * dx + dy * dy + dz * dz
}
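/// Euclidean distance between two 3D points.
///
/// Illustrative usage:
///
/// ```ignore
/// assert_eq!(dist(&[0.0, 0.0, 0.0], &[3.0, 4.0, 0.0]), 5.0);
/// ```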
#[inline(always)]
pub fn dist(p1: &[f32; 3], p2: &[f32; 3]) -> f32 {
dist2(p1, p2).sqrt()
}
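/// Manhattan (L1) distance between two 3D points.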
#[inline(always)]
pub fn distm(p1: &[f32; 3], p2: &[f32; 3]) -> f32 {
let dx = (p1[0] - p2[0]).abs();
let dy = (p1[1] - p2[1]).abs();
let dz = (p1[2] - p2[2]).abs();
dx + dy + dz
}
/// Normalized dot product (cosine of the angle) between two 3D vectors
#[inline(always)]
pub fn ndot(u: &[f32; 3], v: &[f32; 3]) -> f32 {
let z: [f32; 3] = [0.0; 3];
let lm = dist(u, &z) * dist(v, &z);
(u[0] * v[0]) / lm + (u[1] * v[1]) / lm + (u[2] * v[2]) / lm
}
/// Hash the contents of `path` with SHA3-256 and return the hash as a vector of bytes
fn hash_file(path: &Path) -> Vec<u8> {
let mut hash_reader = BufReader::new(File::open(path).unwrap());
let mut hasher = Sha3_256::new();
std::io::copy(&mut hash_reader, &mut hasher).unwrap();
hasher.finalize().iter().copied().collect()
}
/// Construct an `O(1)` lookup index for the csv file at `path`.
/// The index is a bincode-serialized `(Vec<u8>, Vec<u8>)` tuple: the SHA3-256 hash of the
/// file the index belongs to, followed by a deltified vector of per-record byte lengths
/// whose prefix sum over entries `0..=i` gives the file offset of line `i` of the csv file.
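///
/// A sketch of reading the index back and recovering absolute offsets (assuming the same
/// default `bincode` configuration used by `serialize_into` below):
///
/// ```ignore
/// let fh = BufReader::new(File::open(path.with_extension("idx"))?);
/// let (_hash, deltas): (Vec<u8>, Vec<u8>) = bincode::deserialize_from(fh)?;
/// // Running sum of the per-record lengths yields the byte offset of each line.
/// let offsets: Vec<u64> = deltas
///     .iter()
///     .scan(0u64, |acc, &d| {
///         *acc += d as u64;
///         Some(*acc)
///     })
///     .collect();
/// ```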
pub fn build_index(path: &Path) -> std::io::Result<()> {
let file_hash = hash_file(path);
let mut wtr = BufWriter::new(File::create(path.with_extension("idx"))?);
let mut idx: Vec<u8> = Vec::new();
let mut records = (csv::ReaderBuilder::new()
.has_headers(false)
.from_path(path)?)
.into_deserialize::<System>();
let mut n: usize = 0;
let mut size;
idx.push(0);
loop {
n += 1;
if n % 100000 == 0 {
info!("{} Bodies processed", n);
}
let new_pos = records.reader().position().byte();
if records.next().is_none() {
break;
}
size = records.reader().position().byte() - new_pos;
idx.push(size as u8);
}
assert_eq!(idx.len(), n);
bincode::serialize_into(&mut wtr, &(file_hash, idx)).unwrap();
Ok(())
}
/// Node for R*-Tree
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub struct TreeNode {
/// System ID
pub id: u32,
/// Position in space
pub pos: [f32; 3],
/// Star-class flags:
/// 00 unscoopable
/// 01 scoopable
/// 10 white dwarf
/// 11 neutron star
pub flags: u8,
}
impl ToPyObject for TreeNode {
fn to_object(&self, py: Python<'_>) -> PyObject {
pythonize::pythonize(py, self).unwrap()
}
}
impl TreeNode {
/// Retrieve matching [System] for this tree node
pub fn get(&self, router: &Router) -> Result<Option<System>, String> {
router.get(self.id)
}
pub fn get_mult(&self) -> f32 {
match self.flags {
0b11 => 4.0,
0b10 => 1.5,
_ => 1.0,
}
}
}
impl PartialEq for TreeNode {
fn eq(&self, other: &Self) -> bool {
self.id == other.id
}
}
impl Eq for TreeNode {}
impl PartialOrd for TreeNode {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Ord for TreeNode {
fn cmp(&self, other: &TreeNode) -> Ordering {
self.id.cmp(&other.id)
}
}
impl Hash for TreeNode {
fn hash<H: Hasher>(&self, state: &mut H) {
self.id.hash(state);
}
}
/// Star system info read from CSV
#[derive(Debug, Clone, Serialize, Deserialize, IntoPyObject)]
pub struct System {
/// Unique System id
pub id: u32,
/// Star system name
pub name: String,
/// Number of bodies
pub num_bodies: u8,
/// Does the system have a scoopable star?
pub has_scoopable: bool,
/// Jump range multiplier (1.5 for white dwarfs, 4.0 for neutron stars, 1.0 otherwise)
pub mult: f32,
/// Position
pub pos: [f32; 3],
}
impl System {
fn get_flags(&self) -> u8 {
if self.mult == 4.0 {
return 0b11;
}
if self.mult == 1.5 {
return 0b10;
}
if self.has_scoopable {
return 0b01;
}
0b00
}
}
impl ToPyObject for System {
fn to_object(&self, py: Python<'_>) -> PyObject {
let d = PyDict::new(py);
d.set_item("id", self.id).unwrap();
d.set_item("name", self.name.clone()).unwrap();
d.set_item("num_bodies", self.num_bodies).unwrap();
d.set_item("has_scoopable", self.has_scoopable).unwrap();
d.set_item("mult", self.mult).unwrap();
d.set_item("pos", (self.pos[0], self.pos[1], self.pos[2]))
.unwrap();
d.to_object(py)
}
}
impl System {
pub fn to_node(&self) -> TreeNode {
TreeNode {
id: self.id,
pos: self.pos,
flags: self.get_flags(),
}
}
}
impl Ord for System {
fn cmp(&self, other: &Self) -> Ordering {
self.id.cmp(&other.id)
}
}
impl PartialOrd for System {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
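/// Queue of items bucketed by search depth.
///
/// Illustrative usage:
///
/// ```ignore
/// let mut q = DQueue::new();
/// q.enqueue(2, "deep");
/// q.enqueue(0, "shallow");
/// assert_eq!(q.dequeue(2), Some("deep"));
/// assert_eq!(q.dequeue(5), None);
/// ```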
#[derive(Debug)]
pub struct DQueue<T>(Vec<VecDeque<T>>);
impl<T> DQueue<T> {
pub fn new() -> Self {
Self(vec![])
}
pub fn enqueue(&mut self, depth: usize, item: T) {
// Grow (never shrink) the bucket list so `depth` is a valid index.
if self.0.len() <= depth {
self.0.resize_with(depth + 1, VecDeque::new);
}
self.0[depth].push_back(item);
}
pub fn dequeue(&mut self, depth: usize) -> Option<T> {
self.0.get_mut(depth).and_then(|q| q.pop_back())
}
}
impl<T> Default for DQueue<T> {
fn default() -> Self {
Self::new()
}
}
#[derive(Debug, Default, Serialize, Deserialize)]
struct BKTreeNode {
ids: HashSet<u32, BuildHasherDefault<NoHashHasher<u32>>>,
children: HashMap<u8, Self, BuildHasherDefault<NoHashHasher<u8>>>,
}
impl BKTreeNode {
fn new(data: &[String], dist: &eddie::str::Levenshtein) -> Self {
let mut tree = Self::default();
let mut max_depth = 0;
for id in 0..data.len() {
max_depth = max_depth.max(tree.insert(data, id as u32, dist, 0));
if (id > 0) && (id % 100_000 == 0) {
println!("Inserting ID {}, Max Depth: {}", id, max_depth);
}
}
println!("Max Depth: {}", max_depth);
tree
}
fn from_id(id: u32) -> Self {
let mut ret = Self::default();
ret.ids.insert(id);
ret
}
fn insert(
&mut self,
data: &[String],
id: u32,
dist: &eddie::str::Levenshtein,
depth: usize,
) -> usize {
if self.is_empty() {
self.ids.insert(id);
return depth;
}
let idx = self.get_id().unwrap() as usize;
let dist_key = dist.distance(&data[idx], &data[id as usize]) as u8;
if dist_key == 0 {
self.ids.insert(id);
return depth;
}
if let Some(child) = self.children.get_mut(&dist_key) {
child.insert(data, id, dist, depth + 1)
} else {
self.children.insert(dist_key, Self::from_id(id));
depth
}
}
fn get_id(&self) -> Option<u32> {
self.ids.iter().copied().next()
}
fn is_empty(&self) -> bool {
self.ids.is_empty()
}
}
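/// BK-tree over a list of strings, keyed by Levenshtein distance (`lookup` is not implemented yet).
///
/// Illustrative construction (the system names are made up):
///
/// ```ignore
/// let names: Vec<String> = vec!["Sol".into(), "Colonia".into(), "Sagittarius A*".into()];
/// let tree = BKTree::new(&names, 0);
/// assert_eq!(tree.id(), 0);
/// ```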
#[derive(Debug, Serialize, Deserialize)]
pub struct BKTree {
base_id: u32,
root: BKTreeNode,
}
impl BKTree {
pub fn new(data: &[String], base_id: u32) -> Self {
let dist = eddie::str::Levenshtein::new();
let root = BKTreeNode::new(data, &dist);
Self { base_id, root }
}
pub fn id(&self) -> u32 {
self.base_id
}
pub fn dump(&self, fh: &mut BufWriter<File>) -> EdLrrResult<()> {
let options = bincode::DefaultOptions::new();
let amt = options.serialized_size(self)?;
println!("Writing {}", amt);
options.serialize_into(fh, self)?;
Ok(())
}
pub fn lookup(&self, name: &str) -> u32 {
todo!();
}
}