// ED_LRR/rust/src/route.rs
// (file-listing metadata from the source viewer — "2614 lines, 90 KiB, Rust" —
// commented out so the file parses)
//! Route computation functions using various graph search algorithms
use crate::common::{
dist, dist2, distm, fcmp, heuristic, BeamWidth, MinFHeap, System, TreeNode, F32,
};
#[cfg(feature = "profiling")]
use crate::profiling::{span, Level};
use crate::ship::Ship;
use crossbeam_channel::{bounded, unbounded, Receiver, SendError, Sender};
use dashmap::{DashMap, DashSet};
use derivative::Derivative;
use dict_derive::IntoPyObject;
use humantime::format_duration;
use itertools::Itertools;
use log::*;
use permutohedron::LexicalPermutation;
use pyo3::prelude::*;
use pythonize::depythonize;
use rayon::prelude::*;
use rayon::ThreadPoolBuilder;
use rstar::{PointDistance, RStarInsertionStrategy, RTree, RTreeObject, RTreeParams, AABB};
use rustc_hash::{FxHashMap, FxHashSet};
use serde::{Deserialize, Serialize};
use sha3::{Digest, Sha3_256};
use std::convert::TryFrom;
use std::fs::File;
use std::hash::{Hash, Hasher};
use std::io::{BufReader, BufWriter, Write};
use std::path::PathBuf;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::{Arc, Mutex};
use std::thread;
use std::thread::JoinHandle;
use std::time::{Duration, Instant};
use std::{
collections::{BinaryHeap, VecDeque},
path::Path,
};
/// Progress callback into Python: invoked periodically with the current
/// search state; only the error path of the result is inspected by callers.
type RouterCallback = Box<dyn Fn(&SearchState) -> PyResult<PyObject> + Send>;
/// Minimum time between progress-callback invocations, in milliseconds.
/// NOTE(review): identifier is misspelled ("INVERVAL" vs "INTERVAL"); left
/// unchanged because it is referenced throughout this file.
const STATUS_INVERVAL: u128 = 5000; //ms
/// Optimization target for ship-aware routing.
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub enum ShipMode {
    /// Minimize accumulated fuel cost.
    Fuel,
    /// Minimize the number of jumps.
    Jumps,
}
impl Default for ShipMode {
    fn default() -> Self {
        ShipMode::Fuel
    }
}
/// On-disk shortest-path-tree representation produced by `precomp_spt`.
#[derive(Deserialize, Serialize)]
enum PrecompTree {
    /// Complete parent map rooted at `id`: `map[child_id] = parent_id`
    /// (`u32::MAX` marks unreached entries).
    Full { id: u32, map: Vec<u32> },
    /// Delta against another tree. NOTE(review): `diff` appears to hold
    /// `(index, parent)` overrides relative to tree `parent` (see
    /// `compute_best_diff`) — confirm with the writer side.
    Partial { parent: u32, diff: Vec<(u32, u32)> },
}
/// Priority-queue entry for ship-aware route search.
#[derive(Debug)]
struct ShipRouteState {
    cost: f32,      // accumulated cost (used by the `Ord` impl)
    fuel: f32,      // fuel remaining
    node: TreeNode, // current system
    refuels: usize, // refuel stops so far
    depth: usize,   // jumps taken so far
    dist: f32,      // NOTE(review): meaning not evident from this chunk
    mode: ShipMode, // which objective the ordering uses
}
impl Ord for ShipRouteState {
    /// Priority ordering for the ship-route queue; the trailing `.reverse()`
    /// turns Rust's max-heap convention into "best state pops first".
    ///
    /// # Panics
    /// Panics when `self.mode != other.mode` — states with different
    /// objectives must never share a queue.
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        if self.mode != other.mode {
            panic!(
                "Trying to compare incompatible states: {:?} and {:?}",
                self.mode, other.mode
            );
        };
        match self.mode {
            ShipMode::Fuel => {
                // Order by (cost, refuels).
                fcmp(self.cost, other.cost).then(self.refuels.cmp(&other.refuels))
            }
            ShipMode::Jumps => {
                // Order by (depth, refuels, cost).
                self.depth
                    .cmp(&other.depth)
                    .then(self.refuels.cmp(&other.refuels))
                    .then(fcmp(self.cost, other.cost))
            }
        }
        .reverse()
    }
}
impl PartialOrd for ShipRouteState {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
// NOTE(review): equality compares (node.id, depth, refuels) while `cmp`
// compares cost/depth/refuels, so `eq` and `cmp` can disagree — confirm this
// asymmetry is intended before relying on Ord/Eq consistency.
impl PartialEq for ShipRouteState {
    fn eq(&self, other: &Self) -> bool {
        self.node.id == other.node.id && self.depth == other.depth && self.refuels == other.refuels
    }
}
impl Eq for ShipRouteState {}
/// Weighting coefficients for scoring a node relative to a route.
struct Weight {
    dist_from_start: f32, // weight on normalized distance from the route start
    dist_to_goal: f32,    // weight on normalized distance to the goal
    dist_to_point: Vec<(f32, [f32; 3])>, // extra (weight, position) reference points
}
impl Weight {
    /// Weighted score of `node` for a route from `src` to `dst`: distances to
    /// both endpoints are normalized by the total route length, plus one
    /// weighted distance term per extra reference point.
    fn calc(&self, node: &TreeNode, dst: &TreeNode, src: &TreeNode) -> f32 {
        let route_len = dist(&src.pos, &dst.pos);
        let from_start = (dist(&node.pos, &src.pos) / route_len) * self.dist_from_start;
        let to_goal = (dist(&node.pos, &dst.pos) / route_len) * self.dist_to_goal;
        let extra: f32 = self
            .dist_to_point
            .iter()
            .map(|&(w, p)| dist(&p, &node.pos) * w)
            .sum();
        from_start + to_goal + extra
    }
}
/// Progress snapshot passed to the Python status callback.
#[derive(Debug, Clone, IntoPyObject)]
pub struct SearchState {
    pub mode: String,      // human-readable algorithm name ("A-Star", "Greedy", ...)
    pub system: String,    // name of the system currently being expanded
    pub from: String,      // route start system name
    pub to: String,        // route goal system name
    pub depth: usize,      // jumps from the start to the current node
    pub queue_size: usize, // open-list size
    pub d_rem: f32,        // best remaining distance to the goal (Ly)
    pub d_total: f32,      // straight-line start->goal distance (Ly)
    pub prc_done: f32,     // percent of the straight-line distance covered
    pub n_seen: usize,     // number of systems visited so far
    pub prc_seen: f32,     // n_seen as a percentage of all loaded systems
}
/// Serde default for `PyModeConfig::greedyness`.
fn default_greedyness() -> f32 {
    0.5_f32
}
/// Loosely-typed mode configuration as received from Python; converted to
/// the strongly-typed `ModeConfig` via `TryFrom`.
#[derive(Debug, Deserialize)]
pub struct PyModeConfig {
    /// Algorithm name: "bfs", "dfs", "a_star", "dijkstra", "bi_dir",
    /// "incremental_broadening" or "ship".
    #[serde(default)]
    pub mode: String,
    #[serde(default)]
    pub beam_width: BeamWidth,
    /// A* blend factor; defaults to 0.5 when the field is absent.
    #[serde(default = "default_greedyness")]
    pub greedyness: f32,
    /// Ship definition; required when `mode == "ship"`.
    #[serde(default)]
    pub ship: Option<Ship>,
    #[serde(default)]
    pub use_distance: bool,
    /// "jumps" or "fuel"; only read when `mode == "ship"`.
    #[serde(default)]
    pub ship_mode: String,
}
/// Strongly-typed search configuration, one variant per algorithm.
#[derive(Debug)]
pub enum ModeConfig {
    BreadthFirst {
        beam_width: BeamWidth,
    },
    DepthFirst,
    IncrementalBroadening, // TODO: implement IncrementalBroadening
    BiDir,
    AStar {
        /// 0.0..1.0 blend between breadth-first (0.0) and greedy (1.0).
        greedyness: f32,
        beam_width: BeamWidth,
        /// Optimize for distance instead of jump count (not implemented yet;
        /// hits a `todo!()` in `multiroute`).
        use_distance: bool,
    },
    Dijkstra,
    Greedy,
    Ship {
        mode: ShipMode,
        ship: Ship,
    },
}
impl std::default::Default for PyModeConfig {
    /// Defaults used when Python supplies no configuration at all.
    ///
    /// NOTE(review): `greedyness` defaults to 0.0 here, but the serde field
    /// default (`default_greedyness`) is 0.5 — confirm which is intended.
    fn default() -> Self {
        Self {
            mode: "bfs".to_string(),
            beam_width: BeamWidth::Absolute(8192),
            greedyness: 0.0,
            ship: None,
            use_distance: false,
            ship_mode: "jumps".to_string(),
        }
    }
}
impl std::fmt::Display for ModeConfig {
    /// Human-readable description of the configured search mode, used in the
    /// "Mode: ..." log line when plotting a route.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            ModeConfig::BreadthFirst { beam_width } => {
                // BUGFIX: message was misspelled "Bread-first search".
                write!(f, "Breadth-first search, beam width: {}", beam_width)
            }
            ModeConfig::DepthFirst => write!(f, "Depth-first search"),
            ModeConfig::IncrementalBroadening => write!(f, "Incrementally broadening beam search"),
            ModeConfig::BiDir => write!(f, "Bi-directional search"),
            ModeConfig::AStar {
                greedyness,
                beam_width,
                use_distance,
            } => {
                if *use_distance {
                    write!(
                        f,
                        "A*-Search, least distance, greedyness: {} %, beam width: {}",
                        greedyness, beam_width
                    )
                } else {
                    write!(
                        f,
                        "A*-Search, least jumps, greedyness: {} %, beam width: {}",
                        greedyness, beam_width
                    )
                }
            }
            ModeConfig::Dijkstra => write!(f, "Dijkstra shortest path"),
            ModeConfig::Greedy => write!(f, "Greedy search"),
            ModeConfig::Ship { mode, .. } => match mode {
                ShipMode::Fuel => write!(f, "Ship: Least fuel consumption"),
                ShipMode::Jumps => write!(f, "Ship: Least number of jumps"),
            },
        }
    }
}
impl std::default::Default for ModeConfig {
    /// Matches `PyModeConfig::default()`: breadth-first search with an
    /// absolute beam width of 8192.
    fn default() -> Self {
        Self::BreadthFirst {
            beam_width: BeamWidth::Absolute(8192),
        }
    }
}
impl FromPyObject<'_> for PyModeConfig {
    /// Deserialize a Python dict/object into the config via `pythonize`,
    /// surfacing serde errors as Python `RuntimeError`s.
    fn extract(ob: &PyAny) -> PyResult<Self> {
        depythonize(ob).map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(format!("{}", e)))
    }
}
impl TryFrom<PyModeConfig> for ModeConfig {
    type Error = pyo3::PyErr;
    /// Validate the loosely-typed Python config and map it to a concrete
    /// search mode.
    ///
    /// A* post-processing: greedyness >= 1.0 degrades to pure greedy search,
    /// <= 0.0 to breadth-first (each with a warning).
    /// NOTE(review): there is no "greedy" mode string — greedy search is
    /// only reachable through this clamp; confirm that is intended.
    fn try_from(value: PyModeConfig) -> PyResult<Self> {
        let ret = match value.mode.as_str() {
            "bi_dir" => ModeConfig::BiDir,
            "incremental_broadening" => ModeConfig::IncrementalBroadening,
            "dijkstra" => ModeConfig::Dijkstra,
            "a_star" | "astar" => ModeConfig::AStar {
                greedyness: value.greedyness,
                beam_width: value.beam_width,
                use_distance: value.use_distance,
            },
            "dfs" | "depth_first" => ModeConfig::DepthFirst,
            "bfs" | "breadth_first" => ModeConfig::BreadthFirst {
                beam_width: value.beam_width,
            },
            "ship" => {
                let ship_mode = match value.ship_mode.as_str() {
                    "jumps" => ShipMode::Jumps,
                    "fuel" => ShipMode::Fuel,
                    other => {
                        return Err(pyo3::exceptions::PyRuntimeError::new_err(format!(
                            "invalid ship mode: {}",
                            other
                        )))
                    }
                };
                // Ship mode requires an explicit ship definition.
                ModeConfig::Ship {
                    ship: value.ship.ok_or_else(|| {
                        pyo3::exceptions::PyRuntimeError::new_err(
                            "missing ship config!".to_string(),
                        )
                    })?,
                    mode: ship_mode,
                }
            }
            other => {
                return Err(pyo3::exceptions::PyRuntimeError::new_err(format!(
                    "invalid mode: {}",
                    other
                )))
            }
        };
        // Degenerate greedyness values collapse A* into simpler algorithms.
        if let ModeConfig::AStar {
            greedyness,
            beam_width,
            use_distance: _,
        } = &ret
        {
            if *greedyness >= 1.0 {
                warn!("greedyness {}>=1.0, switching to greedy search", greedyness);
                return Ok(ModeConfig::Greedy);
            }
            if *greedyness <= 0.0 {
                warn!(
                    "greedyness {}<=0.0, switching to breadth-first search",
                    greedyness
                );
                return Ok(ModeConfig::BreadthFirst {
                    beam_width: beam_width.clone(),
                });
            }
        }
        return Ok(ret);
    }
}
/// What to precompute for the route tree.
#[derive(Debug)]
#[allow(non_camel_case_types)]
pub enum PrecomputeMode {
    /// Precompute for all systems.
    Full,
    // NOTE(review): payloads are system ids; exact from/to semantics are not
    // visible in this chunk — confirm before documenting further.
    Route_From(u32),
    Route_To(u32),
    None,
}
impl System {
    /// Squared Euclidean distance from this system to point `p`.
    pub fn dist2(&self, p: &[f32; 3]) -> f32 {
        dist2(&self.pos, p)
    }
    /// Euclidean distance to another system.
    pub fn distp(&self, p: &System) -> f32 {
        dist(&self.pos, &p.pos)
    }
    /// Squared Euclidean distance to another system.
    pub fn distp2(&self, p: &System) -> f32 {
        dist2(&self.pos, &p.pos)
    }
}
impl TreeNode {
    /// Squared Euclidean distance from this node to point `p`.
    pub fn dist2(&self, p: &[f32; 3]) -> f32 {
        dist2(&self.pos, p)
    }
    /// Euclidean distance to a full `System` record.
    pub fn distp(&self, p: &System) -> f32 {
        dist(&self.pos, &p.pos)
    }
}
// Systems are identified solely by `id`; position/name changes do not affect
// equality or hashing, keeping `Hash` consistent with `PartialEq`.
impl PartialEq for System {
    fn eq(&self, other: &Self) -> bool {
        self.id == other.id
    }
}
impl Eq for System {}
impl Hash for System {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.id.hash(state);
    }
}
// rstar integration: a system is indexed as the point at its position.
impl RTreeObject for TreeNode {
    type Envelope = AABB<[f32; 3]>;
    fn envelope(&self) -> Self::Envelope {
        AABB::from_point(self.pos)
    }
}
impl PointDistance for TreeNode {
    // rstar operates on squared distances to avoid square roots.
    fn distance_2(&self, point: &[f32; 3]) -> f32 {
        self.dist2(point)
    }
}
/// SHA3-256 digest of the file at `path`, used to detect stale caches and
/// route trees.
///
/// # Panics
/// Panics when the file cannot be opened or read; callers treat an
/// unreadable data file as unrecoverable. (Was a bare `unwrap()`; now the
/// panic message names the file and the underlying error.)
fn hash_file(path: &Path) -> Vec<u8> {
    let file = File::open(path)
        .unwrap_or_else(|e| panic!("hash_file: cannot open {:?}: {}", path, e));
    let mut reader = BufReader::new(file);
    let mut hasher = Sha3_256::new();
    std::io::copy(&mut reader, &mut hasher)
        .unwrap_or_else(|e| panic!("hash_file: cannot read {:?}: {}", path, e));
    hasher.finalize().to_vec()
}
/// Random-access reader over the systems CSV: `cache[i]` holds the absolute
/// byte offset of record `i`, letting `get(id)` seek straight to one line.
pub struct LineCache {
    cache: Vec<u64>,           // byte offset of each CSV record
    reader: csv::Reader<File>, // headerless CSV reader over the data file
}
impl LineCache {
    /// Like [`LineCache::create`], wrapped for shared use across threads.
    pub fn new(path: &Path) -> Result<Arc<Mutex<Self>>, String> {
        Ok(Arc::new(Mutex::new(Self::create(path)?)))
    }
    /// Open the systems CSV at `path` together with its `.idx` side file
    /// (building the index first if missing) and verify the index against
    /// the data file's SHA3-256 hash.
    pub fn create(path: &Path) -> Result<Self, String> {
        use crate::common::build_index;
        let stars_hash = hash_file(path);
        let idx_path = path.with_extension("idx");
        if !idx_path.exists() {
            warn!("No index found for {:?}, building...", path);
            build_index(path).map_err(|e| format!("Error creating index for {:?}: {}", path, e))?;
        }
        let (hash, cache): (Vec<u8>, Vec<u8>) = bincode::deserialize_from(&mut BufReader::new(
            File::open(idx_path)
                .map_err(|e| format!("Error opening index for {:?}: {}", path, e))?,
        ))
        .map_err(|e| format!("Reading index for {:?}: {}", path, e))?;
        if hash != stars_hash {
            // BUGFIX: message was misspelled "Missmatched".
            return Err(format!("Mismatched hash for {:?}", path));
        }
        let reader = csv::ReaderBuilder::new()
            .has_headers(false)
            .from_path(path)
            .map_err(|e| format!("Error opening csv file {:?}: {}", path, e))?;
        // The index stores per-record deltas (presumably line lengths);
        // prefix-sum them into absolute byte offsets.
        let cache: Vec<u64> = cache
            .iter()
            .scan(0u64, |s, &v| {
                *s += v as u64;
                Some(*s)
            })
            .collect();
        Ok(Self { reader, cache })
    }
    /// Deserialize the record at the reader's current position.
    fn read_sys(&mut self) -> Result<Option<System>, String> {
        self.reader
            .deserialize()
            .next()
            .transpose()
            .map_err(|e| format!("{}", e))
    }
    /// Fetch system `id` by seeking to its byte offset.
    ///
    /// Returns `Ok(None)` for unknown ids (including out-of-range ones) and
    /// for seek failures.
    pub fn get(&mut self, id: u32) -> Result<Option<System>, String> {
        // BUGFIX: `self.cache[id as usize]` panicked on out-of-range ids;
        // treat them as a cache miss instead.
        let byte = match self.cache.get(id as usize) {
            Some(&b) => b,
            None => return Ok(None),
        };
        let mut pos = csv::Position::new();
        pos.set_byte(byte);
        match self.reader.seek(pos) {
            Ok(_) => self.read_sys(),
            Err(_) => Ok(None),
        }
    }
    /// Number of indexed records.
    pub fn len(&self) -> usize {
        self.cache.len()
    }
    pub fn is_empty(&self) -> bool {
        self.cache.is_empty()
    }
}
/// R*-tree tuning: large nodes (200-400 entries per node).
/// NOTE(review): presumably chosen to favor the bulk range queries done
/// here — benchmark before changing.
pub struct LargeNodeParameters;
impl RTreeParams for LargeNodeParameters {
    const MIN_SIZE: usize = 200;
    const MAX_SIZE: usize = 400;
    const REINSERTION_COUNT: usize = 100;
    type DefaultInsertionStrategy = RStarInsertionStrategy;
}
/// R*-tree specialized with the large-node parameters above.
pub type LargeNodeRTree<T> = RTree<T, LargeNodeParameters>;
/// One neighbour-expansion job: a node to expand at `depth`, plus the base
/// jump `range` (scaled by the node's multiplier at expansion time).
#[derive(Debug, Clone)]
struct WorkUnit {
    node: TreeNode,
    depth: usize,
    parent_id: Option<u32>, // id of the node this one was expanded from
    range: f32,
}
/// Neighbour-expansion worker pool: either disabled (`Empty`) or a set of
/// threads fed over channels.
#[derive(Debug)]
enum WorkerSet {
    Empty,
    Workers {
        handles: Vec<JoinHandle<()>>, // worker threads
        tx: Sender<Option<WorkUnit>>, // jobs; `None` is the shutdown signal
        rx: Receiver<Vec<WorkUnit>>,  // expanded neighbour batches
    },
}
impl WorkerSet {
    /// Spawn `num_workers` neighbour-expansion threads sharing the R*-tree;
    /// `num_workers == 0` yields the no-op `Empty` variant.
    fn new(tree: Arc<LargeNodeRTree<TreeNode>>, num_workers: usize) -> Self {
        if num_workers == 0 {
            return WorkerSet::Empty;
        }
        let (jobs_tx, jobs_rx) = unbounded();
        // Bounded result channel applies back-pressure to the workers.
        let (result_tx, result_rx) = bounded(100_000);
        let handles = (0..num_workers)
            .map(|_| {
                thread::spawn({
                    let rx = jobs_rx.clone();
                    let tx = result_tx.clone();
                    let tree = tree.clone();
                    move || {
                        Self::work(&tree, rx, tx);
                    }
                })
            })
            .collect();
        return WorkerSet::Workers {
            handles,
            tx: jobs_tx,
            rx: result_rx,
        };
    }
    /// Worker loop: for each job, collect every neighbour within the job's
    /// multiplier-boosted range as a child `WorkUnit`. Exits when a `None`
    /// job arrives or the job channel closes.
    fn work(
        tree: &LargeNodeRTree<TreeNode>,
        rx: Receiver<Option<WorkUnit>>,
        tx: Sender<Vec<WorkUnit>>,
    ) {
        #[cfg(feature = "profiling")]
        let span = span!(Level::INFO, "nb_worker");
        #[cfg(feature = "profiling")]
        let guard = span.enter();
        while let Ok(Some(unit)) = rx.recv() {
            let range = unit.range * unit.node.get_mult();
            let res = tree
                .locate_within_distance(unit.node.pos, range * range)
                .cloned()
                .map(|nb| WorkUnit {
                    node: nb,
                    depth: unit.depth + 1,
                    parent_id: Some(unit.node.id),
                    range: unit.range,
                })
                .collect();
            tx.send(res).unwrap();
        }
        drop(tx);
        #[cfg(feature = "profiling")]
        drop(guard);
    }
    /// Replace this set with a freshly spawned one of `num` threads,
    /// shutting the previous workers down cleanly.
    fn resize(&mut self, tree: Arc<LargeNodeRTree<TreeNode>>, num: usize) -> Result<(), String> {
        let mut new_set = WorkerSet::new(tree, num);
        std::mem::swap(self, &mut new_set);
        new_set.close()?;
        Ok(())
    }
    // fn replace(self, tree: Arc<LargeNodeRTree<TreeNode>>) -> Result<Self, String> {
    // let num=self.num();
    // return self.resize(tree.clone(),num);
    // }
    /// Drain outstanding work, send each worker a `None` shutdown signal,
    /// and join all threads.
    ///
    /// NOTE(review): the drain loop below busy-waits (no sleep/yield) until
    /// both channels are empty — confirm this is acceptable on teardown.
    fn close(self) -> Result<(), String> {
        #[cfg(feature = "profiling")]
        let _span = span!(Level::INFO, "nb_worker:close");
        if let WorkerSet::Workers {
            mut handles,
            tx,
            rx,
        } = self
        {
            let t_start = Instant::now();
            loop {
                if rx.is_empty() && tx.is_empty() {
                    break;
                }
                // Discard pending results so workers blocked on the bounded
                // result channel can make progress.
                rx.try_iter().for_each(|_| {});
            }
            // One shutdown signal per worker thread.
            for _ in &handles {
                match tx.send(None) {
                    Ok(_) => {}
                    Err(e) => {
                        return Err(format!("{:?}", e));
                    }
                }
            }
            drop(tx);
            while let Some(handle) = handles.pop() {
                handle.join().unwrap();
            }
            drop(rx);
            info!(
                "workerset cleared in {}",
                format_duration(t_start.elapsed())
            );
        }
        return Ok(());
    }
    /// Total jobs + results currently queued.
    fn queue_size(&self) -> usize {
        match self {
            WorkerSet::Empty => 0,
            WorkerSet::Workers { rx, tx, .. } => tx.len() + rx.len(),
        }
    }
    fn queue_empty(&self) -> bool {
        return self.queue_size() == 0;
    }
    /// Submit a job.
    ///
    /// # Panics
    /// Panics on the `Empty` variant — callers must start workers first.
    fn send(&self, wu: WorkUnit) -> Result<(), SendError<Option<WorkUnit>>> {
        match self {
            WorkerSet::Empty => {
                panic!("send() on empty WorkerSet");
            }
            WorkerSet::Workers { tx, .. } => {
                return tx.send(Some(wu));
            }
        }
    }
    /// Worker count. NOTE(review): `Empty` reports 1 (not 0) — presumably so
    /// degree-of-parallelism arithmetic never divides by zero; confirm.
    fn num(&self) -> usize {
        match self {
            WorkerSet::Empty => 1,
            WorkerSet::Workers { handles, .. } => handles.len(),
        }
    }
    fn is_empty(&self) -> bool {
        match self {
            WorkerSet::Empty => true,
            WorkerSet::Workers { handles, .. } => handles.len() == 0,
        }
    }
    /// Receive one batch of results (empty batch on the `Empty` variant).
    fn recv(&self) -> Result<Vec<WorkUnit>, String> {
        match self {
            WorkerSet::Empty => Ok(vec![]),
            WorkerSet::Workers { rx, .. } => rx.recv().map_err(|e| format!("{:?}", e)),
        }
    }
    // impl Iterator<Item = &TreeNode>
    // fn join(mut self) -> thread::Result<()> {
    // drop(self.tx);
    // drop(self.rx);
    // let ret: thread::Result<Vec<_>> = self.handles.drain(..).map(|v| v.join()).collect();
    // ret?;
    // return Ok(());
    // }
}
/// Frontier tag for bidirectional search: which direction a node was reached
/// from. NOTE(review): the consuming `route_bidir` is outside this chunk.
#[derive(Debug, PartialOrd, Ord, PartialEq, Eq, Clone, Copy)]
enum BiDirNode {
    Forward(TreeNode),
    Backwards(TreeNode),
}
/// Star-system router: an R*-tree spatial index over all loaded systems plus
/// search state and the Python-facing plumbing (progress callback, CSV line
/// cache).
#[derive(Derivative)]
#[derivative(Debug)]
pub struct Router {
    /// Spatial index over all loaded systems.
    #[derivative(Debug = "ignore")]
    tree: Arc<LargeNodeRTree<TreeNode>>,
    /// IDs of refuel-friendly systems (scoopable star or mult > 1; see `load`).
    #[derivative(Debug = "ignore")]
    scoopable: FxHashSet<u32>,
    /// Precomputed child -> parent route tree, when loaded via `from_file`.
    #[derivative(Debug = "ignore")]
    pub route_tree: Option<FxHashMap<u32, u32>>,
    /// Byte-offset cache for random access into the systems CSV.
    #[derivative(Debug = "ignore")]
    pub cache: Option<Arc<Mutex<LineCache>>>,
    /// Path of the currently loaded systems CSV ("" when nothing is loaded).
    pub path: PathBuf,
    /// NOTE(review): set from the precompute file's `primary` flag in
    /// `from_file`; exact semantics not visible in this chunk.
    pub primary_only: bool,
    /// Neighbour-expansion thread pool.
    #[derivative(Debug = "ignore")]
    workers: WorkerSet,
    /// Progress callback into Python.
    #[derivative(Debug = "ignore")]
    pub callback: Option<RouterCallback>,
    // User-supplied filter/weight expressions; not used within this chunk.
    filter: Option<eval::Function>,
    weight: Option<eval::Function>,
}
impl Default for Router {
    /// An empty router: nothing loaded, no cache, no workers, no callback.
    fn default() -> Self {
        Self {
            path: PathBuf::from(""),
            tree: Arc::new(LargeNodeRTree::default()),
            scoopable: FxHashSet::default(),
            route_tree: None,
            cache: None,
            primary_only: false,
            workers: WorkerSet::Empty,
            callback: None,
            filter: None,
            weight: None,
        }
    }
}
impl Router {
/// Construct an empty router (see the `Default` impl).
pub fn new() -> Self {
    Self::default()
}
/// Install the progress callback invoked periodically during searches.
pub fn set_callback(&mut self, callback: RouterCallback) {
    self.callback = Some(callback);
}
/// Drop the loaded system data, leaving an empty tree.
///
/// NOTE(review): `scoopable`, `cache` and `route_tree` are not cleared here —
/// confirm whether that is intentional before relying on `unload` to free them.
pub fn unload(&mut self) {
    self.path = PathBuf::from("");
    // Consistent with `Default::default()`; avoids a pointless bulk-load of
    // an empty vector.
    self.tree = Arc::new(LargeNodeRTree::default());
}
/// Load systems from the headerless CSV at `path`: builds the R*-tree, the
/// scoopable-id set and the line cache. No-op when `path` is already loaded.
#[cfg_attr(feature = "profiling", tracing::instrument)]
pub fn load(&mut self, path: &Path) -> Result<(), String> {
    if self.path == path {
        return Ok(());
    }
    let mut scoopable = FxHashSet::default();
    let mut reader = match csv::ReaderBuilder::new().has_headers(false).from_path(path) {
        Ok(rdr) => rdr,
        Err(e) => {
            return Err(format!("Error opening {}: {}", path.display(), e));
        }
    };
    let t_load = Instant::now();
    info!("Loading [{}]", path.display());
    let systems: Vec<TreeNode> = reader
        .deserialize::<System>()
        .map(|res| {
            let sys = res.map_err(|e| format!("{}", e))?;
            // Systems with a range multiplier or a scoopable star count as
            // refuel-friendly.
            if (sys.mult > 1.0f32) || (sys.has_scoopable) {
                scoopable.insert(sys.id);
            }
            Ok(sys.to_node())
        })
        .collect::<Result<Vec<TreeNode>, String>>()?;
    info!(
        "{} Systems loaded in {}",
        systems.len(),
        format_duration(t_load.elapsed())
    );
    let t_load = Instant::now();
    self.tree = Arc::new(LargeNodeRTree::bulk_load_with_params(systems));
    info!("R*-Tree built in {}", format_duration(t_load.elapsed()));
    self.path = PathBuf::from(path);
    // Cache creation failure is non-fatal: log and continue without a cache.
    self.cache = LineCache::new(path)
        .map_err(|e| error!("Error creating cache: {}", e))
        .ok();
    self.scoopable = scoopable;
    Ok(())
}
/// Look up system `id` through the line cache.
///
/// # Errors
/// Errors when no cache is loaded or the cache mutex is poisoned (the
/// previous implementation panicked in both cases despite returning
/// `Result`).
pub fn get(&self, id: u32) -> Result<Option<System>, String> {
    let cache = self
        .cache
        .as_ref()
        .ok_or_else(|| "No line cache loaded!".to_string())?;
    let mut cache = cache
        .lock()
        .map_err(|e| format!("Line cache mutex poisoned: {}", e))?;
    cache.get(id)
}
/// Borrow the underlying R*-tree.
pub fn get_tree(&self) -> &LargeNodeRTree<TreeNode> {
    self.tree.as_ref()
}
/// (Re)start the neighbour-expansion worker pool with `num` threads,
/// tearing down any previous set.
fn start_workers(&mut self, num: usize) -> Result<(), String> {
    self.workers.resize(self.tree.clone(), num)
}
/// Load a bincode-serialized route tree produced by a precompute run.
/// Verifies the referenced data file's hash, builds a line cache, and
/// returns `(data_path, range, router)` — the router has no R*-tree loaded.
#[cfg_attr(feature = "profiling", tracing::instrument)]
pub fn from_file(filename: &Path) -> Result<(PathBuf, f32, Self), String> {
    let mut reader = BufReader::new(match File::open(&filename) {
        Ok(fh) => fh,
        Err(e) => return Err(format!("Error opening file {}: {}", filename.display(), e)),
    });
    info!("Loading {}", filename.display());
    let (primary, range, file_hash, path, route_tree): (
        bool,
        f32,
        Vec<u8>,
        PathBuf,
        FxHashMap<u32, u32>,
    ) = match bincode::deserialize_from(&mut reader) {
        Ok(res) => res,
        Err(e) => return Err(format!("Error loading file {}: {}", filename.display(), e)),
    };
    // Refuse stale route trees pointing at a modified data file.
    if hash_file(&path) != file_hash {
        return Err("File hash mismatch!".to_string());
    }
    let cache =
        Some(LineCache::new(&path).map_err(|e| format!("Error creating cache: {}", e))?);
    Ok((
        path.clone(),
        range,
        Self {
            tree: Arc::new(RTree::default()),
            scoopable: FxHashSet::default(),
            route_tree: Some(route_tree),
            cache,
            path,
            callback: None,
            primary_only: primary,
            workers: WorkerSet::Empty,
            filter: None,
            weight: None,
        },
    ))
}
/// The loaded system nearest to `center`, resolved to a full `System`
/// record via the line cache; `None` when no systems are loaded.
/// NOTE(review): the `.unwrap()` panics if the cache lookup errors.
pub fn closest(&self, center: &[f32; 3]) -> Option<System> {
    self.tree.nearest_neighbor(center)?.get(self).unwrap()
}
/// All loaded systems within `radius` of `center` (rstar takes the squared
/// radius).
fn points_in_sphere(&self, center: &[f32; 3], radius: f32) -> impl Iterator<Item = &TreeNode> {
    self.tree.locate_within_distance(*center, radius * radius)
}
/// All systems within `range` of `node` (including `node` itself).
pub fn neighbours(&self, node: &TreeNode, range: f32) -> impl Iterator<Item = &TreeNode> {
    self.points_in_sphere(&node.pos, range)
}
/// Reverse neighbours of `node`: systems whose own multiplier-boosted range
/// (`range * mult`) covers `node`. Searches a 4x radius, then filters
/// exactly by each candidate's multiplier.
/// NOTE(review): assumes `get_mult() <= 4` — TODO confirm.
fn neighbours_r(&self, node: &TreeNode, range: f32) -> impl Iterator<Item = &TreeNode> {
    let origin = node.pos;
    let range_sq = range * range;
    self.points_in_sphere(&origin, range * 4.0)
        .filter(move |cand| cand.dist2(&origin) < (range_sq * cand.get_mult() * cand.get_mult()))
}
/// Distance from `node` to the straight line through `start` and `end`:
/// Heron's formula gives the triangle area, divided by half the base.
fn line_dist(&self, node: &TreeNode, start: &TreeNode, end: &TreeNode) -> f32 {
    let base = dist(&start.pos, &end.pos);
    let a = dist(&node.pos, &end.pos);
    let b = dist(&start.pos, &node.pos);
    ((a + b + base) * (-a + b + base) * (a - b + base) * (a + b - base)).sqrt() / (base * 2.0)
}
/// A system is a valid route node when no scoopable filter was built, or
/// when its id is in the scoopable set.
fn valid(&self, id: u32) -> bool {
    self.scoopable.is_empty() || self.scoopable.contains(&id)
}
pub fn bfs_loop_test(&self, range: f32, source: &TreeNode, goal: &TreeNode, n: usize) -> (bool, usize, usize) {
// info!("Starting thread pool");
// ThreadPoolBuilder::new()
// .num_threads(8)
// .build_global()
// .unwrap();
let t_start = Instant::now();
let route_dist = dist(&source.pos, &goal.pos);
let seen: Arc<DashMap<u32, u32>> = Arc::new(DashMap::new());
let mut depth = 0;
let mut queue = vec![*source];
let mut queue_next = vec![];
let tree = self.tree.clone();
let r2 = range * range;
let mut found = false;
while !queue.is_empty() {
depth += 1;
let seen = seen.clone();
queue_next.extend(queue.drain(..).flat_map(|sys| {
let seen = seen.clone();
tree.locate_within_distance(sys.pos, r2)
.filter_map(move |nb| seen.insert(nb.id, sys.id).is_none().then_some(*nb))
}));
if seen.contains_key(&goal.id) {
found = true;
break;
}
std::mem::swap(&mut queue_next, &mut queue);
if n != 0 {
queue.sort_by_cached_key(|v| F32(heuristic(range, v, goal)));
queue.truncate(n);
}
// info!("[{}|{}] {}", goal.id, depth, queue.len());
}
let seen = Arc::try_unwrap(seen)
.unwrap()
.into_iter()
.collect::<FxHashMap<u32, u32>>();
info!(
"[{}|{}->{} ({:.02} Ly)|{}] Depth: {} Seen: {} ({:.02}%) Took: {}",
n,
source.id,
goal.id,
route_dist,
found,
depth,
seen.len(),
((seen.len() as f64) / (tree.size() as f64)) * 100.0,
humantime::format_duration(t_start.elapsed())
);
return (found, depth, seen.len());
let path=self.reconstruct(goal.id, &seen);
}
/// Walk the `child -> parent` map back from `goal_id`, resolving each
/// ancestor id to a `System`, and return the path root-first. The goal
/// system itself is not included — only its ancestors (matches the original
/// behavior).
fn reconstruct(&self, goal_id: u32, map: &FxHashMap<u32, u32>) -> Result<Vec<System>, String> {
    let mut path = vec![];
    let mut current = goal_id;
    while let Some(&next) = map.get(&current) {
        // BUGFIX: guard against self-loops (e.g. a root mapped to itself,
        // as written by `precomp_spt`), which previously spun forever.
        if next == current {
            break;
        }
        path.push(
            self.get(next)?
                .ok_or_else(|| format!("System ID {} not found", next))?,
        );
        current = next;
    }
    path.reverse();
    Ok(path)
}
/// Try every permutation of `waypoints`, keep the one with the smallest
/// total leg distance, then route it via `multiroute`.
///
/// `keep.0` / `keep.1` pin the first / last waypoint in place.
fn best_multiroute(
    &mut self,
    waypoints: &[System],
    range: f32,
    keep: (bool, bool),
    mode: ModeConfig,
    _max_dist: f32,
    num_workers: usize,
) -> Result<Vec<System>, String> {
    let mut best_score: f32 = std::f32::MAX;
    let mut waypoints = waypoints.to_owned();
    let mut best_permutation_waypoints = waypoints.to_owned();
    let first = waypoints.first().cloned();
    let last = waypoints.last().cloned();
    info!("Finding best permutation of hops...");
    // Rewind to the lexicographically smallest permutation so the loop
    // below enumerates every permutation exactly once.
    while waypoints.prev_permutation() {}
    loop {
        let c_first = waypoints.first().cloned();
        let c_last = waypoints.last().cloned();
        // BUGFIX: a `keep` flag of `false` means "this endpoint may move",
        // not "reject everything" — the old `keep.0 && ...` conjunction made
        // `valid` permanently false whenever either flag was false.
        let valid = (!keep.0 || c_first == first) && (!keep.1 || c_last == last);
        if valid {
            let mut total_d = 0.0;
            for pair in waypoints.windows(2) {
                match pair {
                    [src, dst] => {
                        // BUGFIX: compare real distances; summing squared
                        // distances (`distp2`) does not rank permutations by
                        // total route length.
                        total_d += src.distp(dst);
                    }
                    _ => return Err("Invalid routing parameters!".to_string()),
                }
            }
            if total_d < best_score {
                best_score = total_d;
                best_permutation_waypoints = waypoints.to_owned();
            }
        }
        if !waypoints.next_permutation() {
            break;
        }
    }
    info!("Best permutation: {:?}", best_permutation_waypoints);
    self.multiroute(&best_permutation_waypoints, range, mode, num_workers)
}
/// Plot a route visiting `waypoints` in order: each consecutive pair is
/// routed with the configured algorithm and the legs are concatenated
/// (dropping the duplicated join system between legs). Workers are started
/// before and torn down after the whole run.
fn multiroute(
    &mut self,
    waypoints: &[System],
    range: f32,
    mode: ModeConfig,
    num_workers: usize,
) -> Result<Vec<System>, String> {
    if self.tree.size() == 0 {
        // NOTE(review): runtime string contains a typo ("pleased"); left
        // untouched in this documentation-only pass.
        return Err("No Systems loaded, pleased load some with the 'load' method!".to_string());
    }
    if num_workers != 0 {
        self.start_workers(num_workers)?;
    }
    let mut route = vec![];
    for pair in waypoints.windows(2) {
        match pair {
            [src, dst] => {
                let d_total = dist(&src.pos, &dst.pos);
                info!("Plotting route from [{}] to [{}]...", src.name, dst.name);
                info!(
                    "Jump Range: {} Ly, Distance: {} Ly, Estimated Jumps: {}",
                    range,
                    d_total,
                    d_total / range
                );
                info!("Mode: {}", mode);
                // NOTE(review): max_dist is hard-coded to NaN here — the
                // `_max_dist` passed to `best_multiroute` is ignored; confirm.
                let max_dist = std::f32::NAN;
                let block = match &mode {
                    ModeConfig::AStar {
                        greedyness,
                        beam_width,
                        use_distance,
                    } => {
                        if *use_distance {
                            todo!();
                        }
                        self.route_astar(src, dst, *greedyness, beam_width, range, max_dist)
                    }
                    ModeConfig::DepthFirst => self.route_dfs(src, dst, range),
                    ModeConfig::BreadthFirst { beam_width } => {
                        self.route_bfs(src, dst, range, beam_width, max_dist)
                    }
                    ModeConfig::IncrementalBroadening => {
                        self.route_incremental_broadening(range)
                    }
                    ModeConfig::Dijkstra => self.route_dijkstra(range),
                    ModeConfig::Ship { ship, mode } => self.route_ship(src, dst, ship, mode),
                    ModeConfig::BiDir => self.route_bidir(src, dst, range),
                    ModeConfig::Greedy => self.route_greedy(src, dst, range),
                }?;
                // Skip the first system of every leg after the first: it is
                // the same system the previous leg ended on.
                if route.is_empty() {
                    for sys in block.iter() {
                        route.push(sys.clone());
                    }
                } else {
                    for sys in block.iter().skip(1) {
                        route.push(sys.clone());
                    }
                }
            }
            _ => {
                return Err("Invalid routing parameters!".to_owned());
            }
        }
    }
    // Tear the worker set down even when it was never started (Empty close
    // is a no-op).
    let mut workers = WorkerSet::Empty;
    std::mem::swap(&mut self.workers, &mut workers);
    workers.close()?;
    Ok(route)
}
/// A*-style search from `src` to `dst`.
///
/// `factor` (greedyness) in (0, 1) blends the heuristic (`factor`) against
/// accumulated depth (`1 - factor`); `0.0` delegates to BFS and `~1.0` to
/// greedy search. NOTE(review): `beam_width` is only honored by the BFS
/// fallback — the A* loop itself never truncates the queue; confirm intended.
///
/// Returns the route from `src` to `dst`, or an error string when no route
/// exists or the progress callback fails.
fn route_astar(
    &mut self,
    src: &System,
    dst: &System,
    factor: f32,
    beam_width: &BeamWidth,
    range: f32,
    max_dist: f32,
) -> Result<Vec<System>, String> {
    // NOTE(review): debug trace written unconditionally to the working
    // directory; consider gating this behind a flag.
    let mut log_file = BufWriter::new(File::create("route_log.txt").unwrap());
    if factor == 0.0 {
        return self.route_bfs(src, dst, range, beam_width, max_dist);
    }
    if (1.0 - factor).abs() < 1e-3 {
        if beam_width.is_set() {
            // BUGFIX: message was misspelled "Usign ... ignorimg".
            warn!("Using greedy algorithm, ignoring beam width!")
        }
        return self.route_greedy(src, dst, range);
    }
    let mut factor = factor;
    if !(0.0..=1.0).contains(&factor) {
        let new_factor = factor.min(1.0).max(0.0);
        warn!(
            "Greedyness of {} is out of range 0.0-1.0, clamping to {}",
            factor, new_factor
        );
        factor = new_factor;
    }
    let src_name = src.name.clone();
    let dst_name = dst.name.clone();
    let start_sys = src;
    let goal_sys = dst;
    let goal_node = goal_sys.to_node();
    let d_total = dist(&start_sys.pos, &goal_sys.pos);
    let mut d_rem = d_total;
    let mut state = SearchState {
        mode: "A-Star".into(),
        depth: 0,
        queue_size: 0,
        d_rem: d_total,
        d_total,
        prc_done: 0.0,
        n_seen: 0,
        prc_seen: 0.0,
        from: src_name.clone(),
        to: dst_name.clone(),
        system: start_sys.name.clone(),
    };
    let total = self.tree.size() as f32;
    let mut t_last = Instant::now();
    let mut prev = FxHashMap::default();
    // NOTE(review): values are always 0.0 — `seen` is effectively a set.
    let mut seen: FxHashMap<u32, f32> = FxHashMap::default();
    let mut found = false;
    let mut queue: MinFHeap<(usize, TreeNode)> = MinFHeap::new();
    let h = distm(&start_sys.pos, &goal_sys.pos) / range;
    queue.push(
        h,
        (
            0, // depth
            start_sys.to_node(),
        ),
    );
    seen.insert(start_sys.id, 0.0);
    while !found {
        while let Some((_, (depth, node))) = queue.pop() {
            writeln!(log_file, "{},{}", node.id, depth).unwrap();
            // Periodic progress report to the Python callback.
            if t_last.elapsed().as_millis() > STATUS_INVERVAL {
                let sys = node
                    .get(self)?
                    .unwrap_or_else(|| panic!("System-ID {} not found!", node.id));
                t_last = Instant::now();
                state.depth = depth;
                state.queue_size = queue.len();
                state.prc_done = ((d_total - d_rem) * 100f32) / d_total;
                state.d_rem = d_rem;
                state.n_seen = seen.len();
                state.prc_seen = ((seen.len() * 100) as f32) / total;
                state.system = sys.name.clone();
                if let Some(cb) = &self.callback {
                    match cb(&state) {
                        Ok(_) => (),
                        Err(e) => {
                            return Err(format!("{:?}", e));
                        }
                    };
                }
            }
            if node.id == goal_sys.id {
                queue.clear();
                found = true;
                break;
            }
            // Expand unvisited neighbours; the goal always passes the
            // scoopable filter. Back-pointers are recorded in `prev`.
            let new_nodes: Vec<_> = self
                .neighbours(&node, node.get_mult() * range)
                .filter(|nb| (self.valid(nb.id) || (nb.id == goal_sys.id)))
                .filter(|nb| !seen.contains_key(&nb.id))
                .map(|nb| {
                    prev.insert(nb.id, node);
                    let d_g = nb.distp(goal_sys);
                    if d_g < d_rem {
                        d_rem = d_g;
                    }
                    // f = g-cost (depth) weighted by (1-factor),
                    // h = heuristic weighted by factor.
                    let h = heuristic(range, nb, &goal_node) * factor;
                    let f = (depth as f32) * (1.0 - factor);
                    (h + f, (depth + 1, *nb))
                })
                .collect();
            for (w, node) in new_nodes {
                seen.insert(node.1.id, 0.0);
                queue.push(w, node);
            }
        }
        if queue.is_empty() {
            break;
        }
    }
    if !found {
        return Err(format!("No route from {} to {} found!", src_name, dst_name));
    }
    // Walk the back-pointers from the goal to the start.
    let mut v: Vec<System> = Vec::new();
    let mut curr_sys = goal_sys.clone();
    loop {
        v.push(curr_sys.clone());
        match prev.get(&curr_sys.id) {
            Some(sys) => curr_sys = sys.get(self)?.unwrap(),
            None => {
                break;
            }
        }
    }
    v.reverse();
    Ok(v)
}
/// Pure greedy best-first search from `src` to `dst`: always expands the
/// node with the lowest heuristic, ignoring path length so far. Fast, but
/// the resulting route is not guaranteed shortest.
fn route_greedy(&self, src: &System, dst: &System, range: f32) -> Result<Vec<System>, String> {
    // NOTE(review): debug trace written unconditionally to the cwd.
    let mut log_file = BufWriter::new(File::create("route_log.txt").unwrap());
    let src_name = src.name.clone();
    let dst_name = dst.name.clone();
    let start_sys = src;
    let goal_sys = dst;
    let start_node = src.to_node();
    let goal_node = dst.to_node();
    let d_total = dist(&start_sys.pos, &goal_sys.pos);
    let mut d_rem = d_total;
    let mut state = SearchState {
        mode: "Greedy".into(),
        depth: 0,
        queue_size: 0,
        d_rem: d_total,
        d_total,
        prc_done: 0.0,
        n_seen: 0,
        prc_seen: 0.0,
        from: src_name.clone(),
        to: dst_name.clone(),
        system: start_sys.name.clone(),
    };
    let total = self.tree.size() as f32;
    let mut t_last = Instant::now();
    let mut prev = FxHashMap::default();
    // NOTE(review): values are always 0.0 — `seen` is effectively a set.
    let mut seen: FxHashMap<u32, f32> = FxHashMap::default();
    let mut found = false;
    let mut queue: MinFHeap<(usize, TreeNode)> = MinFHeap::new();
    queue.push(
        heuristic(range, &start_node, &goal_node),
        (0, start_sys.to_node()),
    );
    seen.insert(start_sys.id, 0.0);
    while !found {
        while let Some((_, (depth, node))) = queue.pop() {
            writeln!(log_file, "{},{}", node.id, depth).unwrap();
            // Periodic progress report to the Python callback.
            if t_last.elapsed().as_millis() > STATUS_INVERVAL {
                let sys = node
                    .get(self)?
                    .unwrap_or_else(|| panic!("System-ID {} does not exist!", &node.id));
                t_last = Instant::now();
                state.depth = depth;
                state.queue_size = queue.len();
                state.prc_done = ((d_total - d_rem) * 100f32) / d_total;
                state.d_rem = d_rem;
                state.n_seen = seen.len();
                state.prc_seen = ((seen.len() * 100) as f32) / total;
                state.system = sys.name.clone();
                if let Some(cb) = &self.callback {
                    match cb(&state) {
                        Ok(_) => (),
                        Err(e) => {
                            return Err(format!("{:?}", e));
                        }
                    };
                }
            }
            if node.id == goal_sys.id {
                queue.clear();
                found = true;
                break;
            }
            // Expand unvisited neighbours (the goal always passes the
            // scoopable filter), recording back-pointers in `prev`.
            let new_nodes: Vec<_> = self
                .neighbours(&node, node.get_mult() * range)
                .filter(|nb| (self.valid(nb.id) || (nb.id == goal_sys.id)))
                .filter(|nb| !seen.contains_key(&nb.id))
                .map(|nb| {
                    prev.insert(nb.id, node);
                    let d_g = nb.distp(goal_sys);
                    if d_g < d_rem {
                        d_rem = d_g;
                    }
                    *nb
                })
                .collect();
            for node in new_nodes {
                seen.insert(node.id, 0.0);
                let h = heuristic(range, &node, &goal_node);
                queue.push(h, (depth + 1, node));
            }
        }
        if queue.is_empty() {
            break;
        }
    }
    if !found {
        return Err(format!("No route from {} to {} found!", src_name, dst_name));
    }
    // Walk the back-pointers from the goal to the start.
    let mut v: Vec<System> = Vec::new();
    let mut curr_sys = goal_sys.clone();
    loop {
        v.push(curr_sys.clone());
        match prev.get(&curr_sys.id) {
            Some(sys) => curr_sys = sys.get(self)?.unwrap(),
            None => {
                break;
            }
        }
    }
    v.reverse();
    Ok(v)
}
/// Build a shortest-path tree (BFS parent map) rooted at `root` over all
/// loaded systems, skipping ids flagged in `skiplist`.
///
/// Returns `PrecompTree::Full` when nothing was skipped; the partial/diff
/// variant is not implemented yet (hits `todo!`).
fn precomp_spt(
    &mut self,
    root: TreeNode,
    range: f32,
    skiplist: &[bool],
    _parent_tree: &Option<String>,
) -> PrecompTree {
    let t_start = Instant::now();
    let skiplist_size = skiplist.iter().filter(|&&v| v).count();
    let mut skipped: usize = 0;
    let mut edges = 0usize;
    let mut n: usize = 0;
    // prev[child_id] = parent_id; u32::MAX marks "unreached".
    let mut prev = vec![std::u32::MAX; self.tree.size()];
    let root_id = root.id;
    {
        // Seed pass: expand every non-skipped system through the neighbour
        // workers and gather the reachable frontier.
        // NOTE(review): `to_visit`/`visited` are computed here but never read
        // by the BFS below — confirm whether this whole pass is vestigial.
        let mut to_visit = FxHashSet::default();
        let mut visited = FxHashSet::default();
        let (tx, rx, mut handles) = self.neighbor_workers(8, range);
        let chunks = skiplist.iter().enumerate().map(|(id, v)| (id as u32, v));
        let chunks = chunks
            .inspect(|&(id, _)| {
                visited.insert(id as u32);
                if id % 100_000 == 0 {
                    println!("ID: {}", id);
                }
            })
            .filter(|(_, v)| !*v)
            .map(|(id, _)| id);
        for chunk in &chunks.chunks(100_000) {
            let chunk = chunk.collect_vec();
            let res = self
                .get_systems_by_ids(&chunk)
                .unwrap()
                .values()
                .map(|sys| sys.to_node())
                .collect_vec();
            tx.send(res).unwrap();
            // Opportunistically drain results while feeding jobs.
            for res in rx.try_iter() {
                for (_node, nbs) in res {
                    nbs.iter().for_each(|nb| {
                        to_visit.insert(nb.id);
                    });
                }
            }
        }
        drop(tx);
        // Drain remaining worker results, then join the workers.
        while let Ok(res) = rx.recv() {
            for (_node, nbs) in res {
                nbs.iter().for_each(|nb| {
                    to_visit.insert(nb.id);
                });
            }
        }
        drop(rx);
        for handle in handles.drain(..) {
            handle.join().unwrap();
        }
        println!("new queue: {} Nodes", to_visit.len());
    }
    // Plain BFS from the root, recording parents (the root is its own
    // parent — see `reconstruct` for the matching termination handling).
    let mut q = VecDeque::new();
    prev[root.id as usize] = root.id;
    q.push_back(root);
    while let Some(node) = q.pop_front() {
        if n % 100_000 == 0 {
            let p_prc: f64 = ((n as f64) / (self.tree.size() as f64)) * 100.0;
            let s_prc: f64 = ((skipped as f64) / (skiplist.len() as f64)) * 100.0;
            println!(
                "[{}] {}/{} ({:.2} %) | Q: {} | S: {}/{} ({:.2} %) ",
                root_id,
                n,
                self.tree.size(),
                p_prc,
                q.len(),
                skipped,
                skiplist_size,
                s_prc
            );
        }
        let mut nbs = self
            .neighbours(&node, node.get_mult() * range)
            .collect::<Vec<_>>();
        // Visit far-reaching, high-multiplier neighbours first.
        nbs.sort_by_key(|n| F32(-(dist(&root.pos, &n.pos) + (n.get_mult() * range))));
        for nb in nbs {
            if skiplist[nb.id as usize] {
                skipped += 1;
                continue;
            }
            edges += 1;
            if prev[nb.id as usize] == std::u32::MAX {
                prev[nb.id as usize] = node.id;
                q.push_back(*nb);
            }
        }
        n += 1;
    }
    println!(
        "{} | Nodes: {}/{}, Edges: {}",
        format_duration(t_start.elapsed()),
        prev.iter().filter(|&&v| v != std::u32::MAX).count(),
        self.tree.size(),
        edges
    );
    if skiplist_size == 0 {
        return PrecompTree::Full {
            id: root_id,
            map: prev,
        };
    } else {
        // Partial (diff) trees are not implemented yet; `spt_map` is the
        // intended sparse representation.
        let mut spt_map = FxHashMap::default();
        for (n, &v) in prev.iter().enumerate() {
            if v == std::u32::MAX || n == (v as usize) {
                continue;
            }
            spt_map.insert(n as u32, v);
        }
        todo!();
        // return PrecompTree::Partial {
        //     parent: parent_tree.clone().unwrap(),
        //     remove: FxHashSet::default(),
        //     links: spt_map,
        // };
    }
}
/// Load several full `PrecompTree`s from `paths` and return the pair with
/// the smallest parent-map difference, as `(id_a, id_b, diff)` where `diff`
/// holds `(index, parent_in_b)` for every disagreeing slot.
///
/// # Errors
/// Errors when any input file holds a `Partial` tree.
///
/// # Panics
/// Panics when a file cannot be opened or deserialized.
pub fn compute_best_diff(&self, paths: &[&str]) -> Result<(u32, u32, Vec<(u32, u32)>), String> {
    let mut trees = Vec::new();
    for &path in paths {
        let reader = BufReader::new(File::open(&path).unwrap());
        let spt: PrecompTree = bincode::deserialize_from(reader).unwrap();
        let spt = match spt {
            PrecompTree::Full { id, map } => (id, map),
            PrecompTree::Partial { .. } => return Err("Need full tree!".to_owned()),
        };
        trees.push(spt);
    }
    // (diff size, (id_a, id_b, diff)) — smaller diff wins.
    let mut best = (std::usize::MAX, (0, 0, vec![]));
    for (i1, (id_1, t1)) in trees.iter().enumerate() {
        for (_i2, (id_2, t2)) in trees.iter().enumerate().skip(i1 + 1) {
            if t1.len() != t2.len() {
                // BUGFIX: message was misspelled "missmatch".
                println!("Length mismatch between {} and {}", id_1, id_2);
                continue;
            }
            let diff: Vec<(u32, u32)> = t1
                .iter()
                .zip(t2)
                .enumerate()
                .filter(|(_, (a, b))| a != b)
                .map(|(i, (_, b))| (i as u32, *b))
                .collect();
            if diff.len() < best.0 {
                best = (diff.len(), (*id_1, *id_2, diff));
            }
        }
    }
    Ok(best.1)
}
    /// Experimental: builds a shortest-path-tree (SPT) decomposition of the
    /// galaxy graph for jump range `range`.
    ///
    /// Computes (or loads a cached) full SPT from the node at the origin,
    /// inverts the parent map, finds the largest child subtree below the
    /// root, and computes a second SPT rooted at that subtree's head with the
    /// subtree nodes skipped. Results are cached as bincode files under
    /// hard-coded `O:\` paths — NOTE(review): machine-specific paths.
    pub fn precomp_bfs(&mut self, range: f32) -> Result<(), String> {
        // add subtree nodes to visited set
        // add all neighbors of visited nodes which are not in visited set to search queue
        // =================================
        // let tree_diff =
        //     self.compute_best_diff(&[r#"O:\spt_740186_48.dat"#, r#"O:\spt_32861765_48.dat"#])?;
        // let out_path = format!(r#"O:\spt_diff_{}_{}.dat"#, tree_diff.0, tree_diff.1);
        // let buf_writer = BufWriter::new(File::create(&out_path).unwrap());
        // bincode::serialize_into(buf_writer, &tree_diff).unwrap();
        // println!("Overlap: {:.2} % ", ol * 100.0);
        let mut skiplist_vec = vec![false; self.tree.size()];
        let root = *self.tree.locate_at_point(&[0.0, 0.0, 0.0]).unwrap();
        let out_path = format!(r#"O:\spt_{}_{}.dat"#, root.id, range);
        // Only recompute the root SPT when no cached copy exists on disk.
        if !Path::new(&out_path).exists() {
            let spt = self.precomp_spt(root, range, &skiplist_vec, &None);
            let buf_writer = BufWriter::new(File::create(&out_path).unwrap());
            bincode::serialize_into(buf_writer, &spt).unwrap();
        };
        let reader = BufReader::new(File::open(&out_path).unwrap());
        let spt: PrecompTree = bincode::deserialize_from(reader).unwrap();
        let spt = match spt {
            PrecompTree::Full { map, .. } => map,
            PrecompTree::Partial { .. } => panic!("Need full root tree!"),
        };
        let t_start = Instant::now();
        // Invert the parent map: parent id -> set of child ids.
        // Entries that are u32::MAX (unreached) or self-parented are skipped.
        let mut inverse_spt: FxHashMap<u32, FxHashSet<u32>> = FxHashMap::default();
        for (n, &v) in spt.iter().enumerate() {
            if v == std::u32::MAX || n == (v as usize) {
                continue;
            }
            inverse_spt.entry(v).or_default().insert(n as u32);
        }
        let tree_deg =
            inverse_spt.values().map(|v| v.len() as f64).sum::<f64>() / (inverse_spt.len() as f64);
        println!("Tree inversion took: {:?}", t_start.elapsed());
        println!("Mean degree of SPT: {}", tree_deg);
        let t_start = Instant::now();
        // Among the root's direct children, find the one whose subtree
        // contains the most nodes (flood-fill over the inverted tree).
        let mut best = (FxHashSet::default(), std::u32::MAX);
        for vert in inverse_spt.get(&root.id).iter().flat_map(|l| l.iter()) {
            // compute all paths that terminate at vert using our reversed tree
            let mut skiplist = FxHashSet::default();
            let mut q = VecDeque::new();
            q.push_back(*vert);
            while let Some(next) = q.pop_front() {
                skiplist.insert(next);
                if let Some(next_nbs) = inverse_spt.get(&next) {
                    q.extend(next_nbs.iter().filter(|v| !skiplist.contains(v)).sorted());
                }
            }
            if skiplist.len() > best.0.len() {
                best = (skiplist, *vert);
            }
        }
        println!(
            "Largest subtree: [{}] {} ({:.2} %)",
            best.1,
            best.0.len(),
            ((best.0.len() as f64) / (self.tree.size() as f64)) * 100.0
        );
        // Exclude the whole winning subtree from the next SPT computation.
        for id in best.0 {
            skiplist_vec[id as usize] = true;
        }
        let node = self.get(best.1).unwrap().unwrap().to_node();
        println!(
            "Found optimal next node in {}: {:?}",
            format_duration(t_start.elapsed()),
            node
        );
        let sub_spt = self.precomp_spt(node, range, &skiplist_vec, &Some(out_path));
        let out_path = format!(r#"O:\spt_{}_{}.dat"#, node.id, range);
        let buf_writer = BufWriter::new(File::create(&out_path).unwrap());
        bincode::serialize_into(buf_writer, &sub_spt).unwrap();
        return Ok(());
    }
    /// Fuel-aware uniform-cost search from `start_sys` to `goal_sys`.
    ///
    /// Expands `ShipRouteState`s from a `BinaryHeap`; each jump either spends
    /// current fuel or, when that is insufficient, counts a refuel and
    /// retries on a full tank. Neighbours too far even on a full tank are
    /// counted as unreachable and skipped.
    ///
    /// NOTE(review): `BinaryHeap` is a max-heap, so correctness of the
    /// expansion order depends on `ShipRouteState`'s `Ord` impl (defined
    /// above this chunk) inverting the comparison — confirm it pops the
    /// lowest-cost state first.
    fn route_ship(
        &mut self,
        start_sys: &System,
        goal_sys: &System,
        ship: &Ship,
        mode: &ShipMode,
    ) -> Result<Vec<System>, String> {
        let t_start = Instant::now();
        let mut found = false;
        let mut num: usize = 0;
        let mut skipped: usize = 0;
        let mut unreachable: usize = 0;
        const INF: f32 = std::f32::INFINITY;
        let mut queue: BinaryHeap<ShipRouteState> = BinaryHeap::new();
        // best: cheapest cost at which each system id has been reached.
        let mut best: FxHashMap<u32, f32> = FxHashMap::default();
        // prev: child id -> parent id, for path reconstruction.
        let mut prev: FxHashMap<u32, u32> = FxHashMap::default();
        let max_range = ship.max_range();
        let start_node = start_sys.to_node();
        let goal_node = goal_sys.to_node();
        let state = ShipRouteState {
            cost: 0.0,
            fuel: ship.fuel_capacity,
            node: start_node,
            refuels: 0,
            depth: 0,
            dist: 0.0,
            mode: *mode,
        };
        queue.push(state);
        let mut last_new = Instant::now();
        while let Some(state) = queue.pop() {
            if state.node.id == goal_node.id {
                found = true;
                break;
            }
            // Periodic progress log every 100k expansions.
            if num % 100_000 == 0 {
                info!(
                    "D: ({}, {}) | FC: ({}, {}) | N: {} ({}) | B: {} ({}) | Q: {} | UR: {} | SK: {}",
                    state.depth,
                    state.dist,
                    state.refuels,
                    state.cost,
                    num,
                    prev.len(),
                    best.len(),
                    humantime::format_duration(last_new.elapsed()),
                    queue.len(),
                    unreachable,
                    skipped
                );
            }
            num += 1;
            // Skip stale queue entries superseded by a cheaper path.
            let best_cost = *best.get(&state.node.id).unwrap_or(&INF);
            if state.cost > best_cost {
                skipped += 1;
                continue;
            }
            for nb in self.neighbours(&state.node, max_range * state.node.get_mult()) {
                let mut refuels = state.refuels;
                let dist = dist(&nb.pos, &state.node.pos);
                let (fuel_cost, new_fuel) = {
                    if let Some(res) =
                        ship.fuel_cost_for_jump(state.fuel, dist, state.node.get_mult())
                    {
                        // can jump with current amount of fuel
                        res
                    } else if let Some(res) =
                        ship.fuel_cost_for_jump(ship.fuel_capacity, dist, state.node.get_mult())
                    {
                        // can jump after refuel
                        refuels += 1;
                        res
                    } else {
                        // can't jump
                        unreachable += 1;
                        continue;
                    }
                };
                let next_cost = *best.get(&nb.id).unwrap_or(&INF);
                let new_cost = state.cost + fuel_cost;
                if new_cost < next_cost {
                    last_new = Instant::now();
                    best.insert(nb.id, new_cost);
                    prev.insert(nb.id, state.node.id);
                    queue.push(ShipRouteState {
                        cost: new_cost,
                        fuel: new_fuel,
                        node: *nb,
                        refuels,
                        depth: state.depth + 1,
                        dist: state.dist + dist,
                        mode: state.mode,
                    });
                }
            }
        }
        info!("Took: {}", format_duration(t_start.elapsed()));
        if !found {
            return Err(format!(
                "No route from {} to {} found!",
                start_sys.name, goal_sys.name
            ));
        }
        // todo!("Fxi path reconstruction");
        // Walk parent links back from the goal to the start.
        // NOTE(review): a missing `prev` entry before the start is reached
        // silently truncates the route instead of erroring — confirm intended.
        let mut v: Vec<System> = Vec::new();
        let mut curr_sys = goal_sys.clone();
        loop {
            v.push(curr_sys.clone());
            if curr_sys.id == start_sys.id {
                break;
            }
            match prev.get(&curr_sys.id) {
                Some(sys) => {
                    curr_sys = self
                        .get_sys(*sys)?
                        .ok_or(format!("System id {} not found", sys))?
                }
                None => {
                    break;
                }
            }
        }
        v.reverse();
        Ok(v)
    }
pub fn closest_neutron(&self, node: &TreeNode) -> Option<(TreeNode, f32)> {
self.tree
.nearest_neighbor_iter_with_distance_2(&node.pos)
.find_map(|(node, dist)| {
if node.get_mult() >= 3.0 {
Some((*node, (dist as f32).sqrt()))
} else {
None
}
})
}
pub fn floyd_warshall(&self, range: f32) -> Result<Vec<System>, String> {
let mut dist: FxHashMap<u64, usize> = FxHashMap::default();
info!("nb...");
let total = self.tree.size();
for (n, node) in self.tree.iter().enumerate() {
if (n % 100_000) == 0 {
println!("{}/{}", n, total);
}
let key = (node.id as u64) << 32;
for nb in self.neighbours(node, range) {
let key = key | nb.id as u64;
dist.entry(key).or_insert(1);
}
let key = ((node.id as u64) << 32) | node.id as u64;
dist.insert(key, 0);
}
todo!()
}
    /// Experimental anytime search from the node nearest the origin to a
    /// hard-coded goal (Beagle Point): keeps searching after the first hit,
    /// shrinking `global_best` (fewest jumps seen so far) and pruning queued
    /// entries that can no longer beat it.
    ///
    /// NOTE(review): no path reconstruction yet — ends in `todo!()` after the
    /// queue is exhausted, so this never actually returns a route.
    pub fn route_incremental_broadening(&self, range: f32) -> Result<Vec<System>, String> {
        /*
        h = (dist(node,goal)-(range*node.mult)).max(0.0) // remaining distance after jumping from here
        */
        let src = self.tree.nearest_neighbor(&[0.0, 0.0, 0.0]).unwrap();
        // let mut route_log = BufWriter::new(File::create("route_log_ib.txt").map_err(|e| e.to_string())?);
        let goal = self
            .tree
            .nearest_neighbor(&[-1111.5625, -134.21875, 65269.75]) // Beagle Point
            // .nearest_neighbor(&[-9530.5, -910.28125, 19808.125]) // Colonia
            .unwrap();
        // best_node: node id -> shallowest depth at which it was reached.
        let mut best_node = FxHashMap::default();
        // let mut prev = FxHashMap::default();
        let mut queue = MinFHeap::new();
        let t_start = Instant::now();
        let mut n = 0usize;
        let mut skipped = 0usize;
        // global_best: fewest jumps with which the goal has been reached.
        let mut global_best = u32::MAX;
        queue.push(heuristic(range, src, goal), (0, src));
        loop {
            println!("Q: {}", queue.len());
            if queue.is_empty() {
                warn!(
                    "Visited: {} | Skipped: {} | search space exhausted after {}",
                    n,
                    skipped,
                    humantime::format_duration(t_start.elapsed())
                );
                break;
            }
            while let Some((_, (depth, node))) = queue.pop() {
                let best_len = best_node.len();
                let best_depth = best_node.entry(node.id).or_insert(depth);
                // Prune entries already deeper than the best known route.
                if *best_depth > global_best {
                    skipped += 1;
                    continue;
                }
                // writeln!(route_log,"{}, {}",node.id,depth).map_err(|e| e.to_string())?;
                // route_log.flush().map_err(|e| e.to_string())?;
                if depth < *best_depth {
                    *best_depth = depth;
                }
                n += 1;
                if node.id == goal.id {
                    if depth < global_best {
                        global_best = global_best.min(depth);
                        // Drop queued entries that cannot improve the bound.
                        queue.retain(|(_, (d, _))| *d <= global_best);
                        info!(
                            "Queued: {}, Skipped: {}, Seen: {} (Total: {}) | Best: {} | elapsed: {}",
                            queue.len(),
                            skipped,
                            n,
                            best_len,
                            global_best,
                            humantime::format_duration(t_start.elapsed()).to_string()
                        );
                    }
                    continue;
                } else if n % 10000 == 0 {
                    info!(
                        "Queued: {}, Skipped: {}, Seen: {} (Total: {}) | Best: {} | elapsed: {}",
                        queue.len(),
                        skipped,
                        n,
                        best_len,
                        global_best,
                        humantime::format_duration(t_start.elapsed()).to_string()
                    );
                }
                // Expand only valid nodes (or the goal) reached at a strictly
                // better depth than any previous visit.
                self.neighbours(node, node.get_mult() * range)
                    .filter(|nb| (self.valid(nb.id) || (nb.id == goal.id)))
                    .filter(|nb| match best_node.get(&nb.id) {
                        Some(&d) => depth < d,
                        None => true,
                    })
                    .map(|nb| (heuristic(range, nb, goal), nb))
                    .for_each(|(h, nb)| {
                        // prev.insert(nb.id, node.id);
                        queue.push(h, (depth + 1, nb));
                    });
            }
        }
        todo!()
    }
    /// Full single-source Dijkstra from the node nearest the origin, using
    /// jump distance as the edge cost.
    ///
    /// NOTE(review): explores the entire reachable graph (no goal or exit
    /// condition yet) and ends in `todo!()` — `prev`/`best` are built but no
    /// path is extracted.
    #[cfg_attr(feature = "profiling", tracing::instrument)]
    pub fn route_dijkstra(&self, range: f32) -> Result<Vec<System>, String> {
        // TODO: exit condition
        let total = self.tree.size();
        let mut n: usize = 0;
        let mut s: usize = 0;
        const INF: f32 = std::f32::INFINITY;
        let mut queue: MinFHeap<(usize, TreeNode)> = MinFHeap::new();
        // prev: child id -> parent id; best: cheapest known cost per id.
        let mut prev: FxHashMap<u32, u32> = FxHashMap::default();
        let mut best: FxHashMap<u32, f32> = FxHashMap::default();
        let seed = *self.tree.nearest_neighbor(&[0.0, 0.0, 0.0]).unwrap();
        queue.push(0.0, (0, seed));
        while let Some((d, (depth, node))) = queue.pop() {
            // Skip stale queue entries superseded by a cheaper path.
            let best_cost = *best.get(&node.id).unwrap_or(&INF);
            // println!("{} {}", node.id, best_cost);
            if d > best_cost {
                s += 1;
                continue;
            }
            n += 1;
            if n % 100_000 == 0 {
                debug!(
                    "{}/{} | C: ({}, {}) | Q: {} | S: {} | B: {}",
                    n,
                    total,
                    depth,
                    d,
                    queue.len(),
                    s,
                    best.len()
                );
            }
            for nb in self.neighbours(&node, node.get_mult() * range) {
                let next_cost = *best.get(&nb.id).unwrap_or(&INF);
                let new_cost = d + dist(&node.pos, &nb.pos);
                if new_cost < next_cost {
                    best.insert(nb.id, new_cost);
                    prev.insert(nb.id, node.id);
                    queue.push(new_cost, (depth + 1, *nb));
                } else {
                    s += 1;
                }
            }
        }
        debug!("Prev: {}", prev.len());
        debug!("Best: {}", best.len());
        todo!()
    }
fn neighbor_workers(
&mut self,
num: usize,
range: f32,
) -> (
Sender<Vec<TreeNode>>,
Receiver<Vec<(TreeNode, Vec<TreeNode>)>>,
Vec<JoinHandle<()>>,
) {
let r2 = range * range;
let (tx_q, rx_q) = unbounded::<Vec<TreeNode>>();
let (tx_r, rx_r) = bounded::<Vec<(TreeNode, Vec<TreeNode>)>>(100);
let threads: Vec<JoinHandle<()>> = (0..num)
.map(|_| {
let tree = Arc::clone(&self.tree);
let tx = tx_r.clone();
let rx = rx_q.clone();
thread::spawn(move || {
rx.into_iter().for_each(|nodes| {
let mut ret = vec![];
for node in nodes {
let res: Vec<TreeNode> =
tree.locate_within_distance(node.pos, r2).cloned().collect();
ret.push((node, res));
}
tx.send(ret).unwrap();
});
drop(tx);
})
})
.collect();
(tx_q, rx_r, threads)
}
pub fn precompute_graph(&mut self, range: f32) -> Result<(), String> {
// TODO: fix multithreading workpool (?)
// TODO: actual route precomputation (?)
let total = self.tree.size();
let mut cnt: usize = 0;
let mut seen: FxHashSet<u32> = FxHashSet::default();
let mut queued: FxHashSet<u32> = FxHashSet::default();
info!("Total nodes: {}", total);
let (tx, rx, threads) = self.neighbor_workers(num_cpus::get(), range);
info!("Precomputing graph");
info!("Sumbitting jobs");
let seed = *self.tree.nearest_neighbor(&[0.0, 0.0, 0.0]).unwrap();
seen.insert(seed.id);
queued.insert(seed.id);
tx.send(vec![seed]).unwrap();
info!("Processing...");
// println!("RX:{} TX:{} Q:{}", rx.len(), tx.len(), queued.len());
while let Ok(res) = rx.recv() {
let mut to_send = vec![];
for (node, neighbors) in res {
cnt += neighbors.len();
queued.remove(&node.id);
for nb in neighbors {
if !seen.insert(nb.id) {
queued.insert(nb.id);
to_send.push(nb);
if to_send.len() > 10_000 {
tx.send(to_send).unwrap();
to_send = vec![];
}
}
}
}
if !to_send.is_empty() {
tx.send(to_send).unwrap();
}
// n+=1;
// if n%10000==0 {
info!(
"{} total, {} count, {} seen, rx: {}, tx: {}, Q: {}",
total,
cnt,
seen.len(),
rx.len(),
tx.len(),
queued.len(),
);
// };
if queued.is_empty() {
break;
}
}
for t in threads {
t.join().unwrap();
}
info!("Done!");
todo!("Implement seed parameter");
// Ok(())
}
#[cfg_attr(feature = "profiling", tracing::instrument)]
pub fn precompute_all(&mut self, range: f32) -> Result<(), String> {
use flate2::write::GzEncoder;
let fh_nb = File::create(format!(r#"O:\nb_{}.dat"#, range)).unwrap();
let mut fh_encoder = BufWriter::new(fh_nb);
let mut pos: u64 = 0;
let mut n = 0;
let total = self.tree.size();
// let (tx, rx, threads) = self.neighbor_workers(num_cpus::get(), range);
let mut map: FxHashMap<u32, u64> = FxHashMap::default();
info!("Precomputing neighbor map...");
self.tree.iter().for_each(|node| {
let nb = self.neighbours(node, range).map(|nb| nb.id).collect_vec();
map.insert(node.id, pos);
pos += fh_encoder.write(&bincode::serialize(&nb).unwrap()).unwrap() as u64;
if (n % 10000) == 0 {
let prc = ((n as f64) / (total as f64)) * 100f64;
info!("{}/{} ({:.2}%) done, {} bytes", n, total, prc, pos);
}
n += 1;
});
let mut fh_idx = BufWriter::new(File::create(format!(r#"O:\nb_{}.idx"#, range)).unwrap());
info!("Writing index map");
info!(
"Wrote {} bytes",
fh_idx.write(&bincode::serialize(&map).unwrap()).unwrap()
);
Ok(())
}
    /// Reverse-direction precomputation (route *to* `dst` instead of from a
    /// source); not implemented yet — always panics via `unimplemented!`.
    fn precompute_to(&mut self, _dst: &System, _range: f32) -> Result<(), String> {
        // TODO: -> precompute to
        unimplemented!();
    }
    /// Breadth-first precomputation of a full route tree rooted at `src`.
    ///
    /// Records every reachable node's BFS parent in `self.route_tree`, then
    /// serializes `(tree_size, range, file_hash, csv_path, parent_map)` to
    /// `<name>_<range>_<hash>.router` for later `route_to` queries.
    fn precompute(&mut self, src: &System, range: f32) -> Result<(), String> {
        // TODO: -> precompute from
        let total = self.tree.size() as f32;
        let _t_start = Instant::now();
        let mut prev = FxHashMap::default();
        let mut seen = FxHashSet::default();
        let mut depth = 0;
        // Two queues: the current BFS layer and the next one.
        let mut queue: VecDeque<(usize, TreeNode)> = VecDeque::new();
        let mut queue_next: VecDeque<(usize, TreeNode)> = VecDeque::new();
        queue.push_front((0, src.to_node()));
        seen.insert(src.id);
        while !queue.is_empty() {
            info!(
                "Depth: {}, Queue: {}, Seen: {} ({:.02}%) \r",
                depth,
                queue.len(),
                seen.len(),
                ((seen.len() * 100) as f32) / total
            );
            std::io::stdout().flush().unwrap();
            while let Some((d, sys)) = queue.pop_front() {
                queue_next.extend(
                    self.neighbours(&sys, sys.get_mult() * range)
                        // .filter(|&nb| self.valid(nb))
                        .filter(|&nb| seen.insert(nb.id))
                        .map(|nb| {
                            prev.insert(nb.id, sys.id);
                            (d + 1, *nb)
                        }),
                );
            }
            std::mem::swap(&mut queue, &mut queue_next);
            depth += 1;
        }
        self.route_tree = Some(prev);
        // Hash the backing CSV so stale route files can be detected on load.
        let file_hash = hash_file(&self.path);
        let file_hash_hex = file_hash
            .iter()
            .map(|v| format!("{:02x}", v))
            .collect::<Vec<String>>()
            .join("");
        let ofn = format!(
            "{}_{}_{}.router",
            src.name.replace('*', "").replace(' ', "_"),
            range,
            file_hash_hex
        );
        let mut out_fh = BufWriter::new(File::create(&ofn).unwrap());
        let data = (
            self.tree.size(),
            range,
            file_hash,
            self.path.clone(),
            self.route_tree.as_ref().unwrap(),
        );
        info!("Done!");
        match bincode::serialize_into(&mut out_fh, &data) {
            Ok(_) => Ok(()),
            Err(e) => Err(format!("Error: {}", e)),
        }
    }
fn get_sys(&self, id: u32) -> Result<Option<System>, String> {
let path = &self.path;
if let Some(c) = &self.cache {
if let Some(sys) = c.lock().unwrap().get(id)? {
return Ok(Some(sys));
};
}
let mut reader = match csv::ReaderBuilder::new().from_path(path) {
Ok(reader) => reader,
Err(e) => {
return Err(format!("Error opening {}: {}", path.display(), e));
}
};
warn!("Running serial search for ID: {:?}", id);
return Ok(reader
.deserialize::<System>()
.map(|res| res.unwrap())
.filter(|sys| sys.id == id)
.last());
}
    /// Resolves every id in `ids` to a `System`.
    ///
    /// Fast path: the sidecar cache. If any id misses the cache, falls back
    /// to a single serial scan of the (headerless) CSV backing file. Errors
    /// if any requested id is absent from the file.
    fn get_systems_by_ids(&self, ids: &[u32]) -> Result<FxHashMap<u32, System>, String> {
        let path = &self.path;
        let mut ret = FxHashMap::default();
        if let Some(c) = &self.cache {
            let mut c = c.lock().unwrap();
            let mut missing = false;
            for id in ids {
                match c.get(*id)? {
                    Some(sys) => {
                        ret.insert(*id, sys);
                    }
                    None => {
                        // One miss means we have to scan the file anyway.
                        missing = true;
                        break;
                    }
                }
            }
            if !missing {
                return Ok(ret);
            }
        }
        let mut reader = match csv::ReaderBuilder::new().has_headers(false).from_path(path) {
            Ok(reader) => reader,
            Err(e) => {
                return Err(format!("Error opening {}: {}", path.display(), e));
            }
        };
        warn!("Running serial search for IDs: {:?}", ids);
        // NOTE(review): `res.unwrap()` panics on a malformed CSV row instead
        // of surfacing an Err — confirm that is acceptable here.
        reader
            .deserialize::<System>()
            .map(|res| res.unwrap())
            .filter(|sys| ids.contains(&sys.id))
            .for_each(|sys| {
                ret.insert(sys.id, sys);
            });
        for id in ids {
            if !ret.contains_key(id) {
                return Err(format!("ID {} not found", id));
            }
        }
        Ok(ret)
    }
fn route_to(&self, dst: &System) -> Result<Vec<System>, String> {
if self.route_tree.is_none() {
return Err("Can't computer route without a precomputed route-tree".to_owned());
}
let prev = self.route_tree.as_ref().unwrap();
if !prev.contains_key(&dst.id) {
return Err(format!("System-ID {} not found", dst.id));
};
let mut v_ids: Vec<u32> = Vec::new();
let mut v: Vec<System> = Vec::new();
let mut curr_sys: u32 = dst.id;
loop {
v_ids.push(curr_sys);
match prev.get(&curr_sys) {
Some(sys_id) => curr_sys = *sys_id,
None => {
break;
}
}
}
v_ids.reverse();
let id_map = self.get_systems_by_ids(&v_ids)?;
for sys_id in v_ids {
let sys = match id_map.get(&sys_id) {
Some(sys) => sys,
None => {
return Err(format!("System-ID {} not found!", sys_id));
}
};
v.push(sys.clone())
}
Ok(v)
}
#[cfg_attr(feature = "profiling", tracing::instrument)]
fn route_dfs(
&self,
start_sys: &System,
goal_sys: &System,
range: f32,
) -> Result<Vec<System>, String> {
if start_sys.id == goal_sys.id {
return Ok(vec![goal_sys.clone()]);
}
let mut log_file = BufWriter::new(File::create("route_log.txt").unwrap());
let t_start = Instant::now();
info!("Running DFS");
let src_name = start_sys.name.clone();
let dst_name = goal_sys.name.clone();
let d_total = dist(&start_sys.pos, &goal_sys.pos);
let mut d_rem = d_total;
let mut state = SearchState {
mode: "DFS".into(),
depth: 0,
queue_size: 0,
d_rem,
d_total,
prc_done: 0.0,
n_seen: 0,
prc_seen: 0.0,
from: src_name.clone(),
to: dst_name.clone(),
system: start_sys.name.clone(),
};
let total = self.tree.size() as f32;
let mut prev = FxHashMap::default();
let mut seen: FxHashMap<u32, usize> = FxHashMap::default();
let mut best_depth = usize::MAX;
let found = false;
let mut t_last = Instant::now();
let mut queue: MinFHeap<(usize, TreeNode)> = MinFHeap::new();
// let mut best = (start_sys.distp(goal_sys), start_sys.to_node());
queue.push(d_total, (0, start_sys.to_node()));
seen.insert(start_sys.id, 0);
loop {
while let Some((_, (depth, node))) = queue.pop() {
if depth > best_depth {
continue;
}
let dist_goal = node.distp(goal_sys);
if dist_goal < d_rem {
d_rem = dist_goal;
// best = (d_rem, node);
};
writeln!(log_file, "{},{}", node.id, depth).unwrap();
if node.id == goal_sys.id {
if depth < best_depth {
info!("Goal reached in {} jumps, best: {}", depth, best_depth);
}
best_depth = best_depth.min(depth);
}
if t_last.elapsed().as_millis() > STATUS_INVERVAL {
state.depth = depth;
state.queue_size = queue.len();
state.prc_done = ((d_total - d_rem) * 100f32) / d_total;
state.d_rem = d_rem;
state.n_seen = seen.len();
state.prc_seen = ((seen.len() * 100) as f32) / total;
state.system = node.get(self)?.unwrap().name.clone();
if let Some(cb) = &self.callback {
match cb(&state) {
Ok(_) => (),
Err(e) => {
return Err(format!("{:?}", e));
}
};
}
t_last = Instant::now();
}
self.neighbours(&node, node.get_mult() * range)
.filter(|nb| (self.valid(nb.id) || (nb.id == goal_sys.id)))
.filter(|nb| {
let depth = depth + 1;
if depth > best_depth {
return false;
}
let mut better = true;
seen.entry(nb.id)
.and_modify(|e| {
better = depth <= *e;
if better {
*e = depth;
}
})
.or_insert(depth);
return better;
})
.for_each(|nb| {
prev.insert(nb.id, node);
queue.push(dist(&nb.pos, &goal_sys.pos), (depth + 1, *nb));
});
}
// let next_len=queue_next.len();
// info!("Queue: {} | Depth: {} | best_d_goal: {}",queue_next.len(),depth,best_d_goal);
if found {
break;
}
if queue.is_empty() {
break;
}
}
info!("Took: {}", format_duration(t_start.elapsed()));
if !found {
return Err(format!(
"No route from {} to {} found, remaining distance: {} Ly",
src_name, dst_name, d_rem
));
}
let mut v: Vec<System> = Vec::new();
let mut curr_sys = goal_sys.clone();
loop {
v.push(curr_sys.clone());
match prev.get(&curr_sys.id) {
Some(sys) => curr_sys = sys.get(self)?.unwrap(),
None => {
break;
}
}
}
v.reverse();
Ok(v)
}
    /// Parallel breadth-first search from `start_sys` to `goal_sys` using the
    /// configured worker pool; falls back to `route_bfs_serial` when no
    /// workers exist. An optional beam width keeps only the most promising
    /// nodes of each result batch (by heuristic distance to the goal).
    ///
    /// NOTE(review): `max_dist` is only forwarded to the serial fallback; the
    /// parallel path ignores it.
    #[cfg_attr(feature = "profiling", tracing::instrument)]
    fn route_bfs(
        &self,
        start_sys: &System,
        goal_sys: &System,
        range: f32,
        beam_width: &BeamWidth,
        max_dist: f32,
    ) -> Result<Vec<System>, String> {
        if self.workers.is_empty() {
            return self.route_bfs_serial(start_sys, goal_sys, range, max_dist, beam_width);
        }
        info!("Running BFS with {} worker(s)", self.workers.num());
        let t_start = Instant::now();
        let mut t_last = Instant::now();
        // prev: child id -> parent id, for path reconstruction.
        let mut prev = FxHashMap::default();
        let mut seen: FxHashMap<u32, f32> = FxHashMap::default();
        let src_name = start_sys.name.clone();
        let dst_name = goal_sys.name.clone();
        let goal_node = goal_sys.to_node();
        let num_workers = self.workers.num();
        let workers = &self.workers;
        let wu = WorkUnit {
            node: start_sys.to_node(),
            parent_id: None,
            depth: 0,
            range,
        };
        if wu.node.id == goal_sys.id {
            return Ok(vec![goal_sys.clone()]);
        }
        let mut found = false;
        let total = self.tree.size() as f32;
        let d_total = dist(&start_sys.pos, &goal_sys.pos);
        let mut d_rem = d_total;
        let mut state = SearchState {
            mode: format!("BFS_parallel({})", num_workers),
            depth: 0,
            queue_size: 0,
            d_rem,
            d_total,
            prc_done: 0.0,
            n_seen: 0,
            prc_seen: 0.0,
            from: src_name.clone(),
            to: dst_name.clone(),
            system: start_sys.name.clone(),
        };
        seen.insert(wu.node.id, 0.0);
        workers.send(wu).unwrap();
        loop {
            if found {
                break;
            }
            let num_seen = seen.len();
            // Receive one batch of expanded neighbours, dropping anything
            // already seen or lacking a parent (the seed echo).
            let mut nbs: Vec<_> = workers
                .recv()?
                .iter()
                .filter(|wu| !found && !seen.contains_key(&wu.node.id))
                .filter(|wu| wu.parent_id.is_some())
                .cloned()
                .collect();
            // Empty batch with an idle pool means the search space is done.
            if nbs.is_empty() && workers.queue_empty() && seen.len() > 1 {
                break;
            }
            if beam_width.is_set() {
                // Keep only the most promising nodes of this batch.
                let bw = beam_width.compute(nbs.len());
                nbs.sort_by_key(|v| {
                    return F32(heuristic(range, &v.node, &goal_node));
                });
                nbs = nbs.iter().take(bw.max(1)).cloned().collect();
            }
            while let Some(wu) = nbs.pop() {
                if t_last.elapsed().as_millis() > STATUS_INVERVAL {
                    // Periodic progress callback to the host application.
                    let dist = wu.node.distp(goal_sys);
                    if dist < d_rem {
                        d_rem = dist;
                    };
                    state.depth = wu.depth;
                    state.queue_size = workers.queue_size();
                    state.prc_done = ((d_total - d_rem) * 100f32) / d_total;
                    state.d_rem = d_rem;
                    state.n_seen = num_seen;
                    state.prc_seen = ((num_seen * 100) as f32) / total;
                    {
                        let s = self.get(wu.node.id)?.unwrap();
                        state.system = s.name;
                    }
                    if let Some(cb) = &self.callback {
                        match cb(&state) {
                            Ok(_) => (),
                            Err(e) => {
                                return Err(format!("{:?}", e));
                            }
                        };
                    }
                    t_last = Instant::now();
                }
                if let Some(parent_id) = wu.parent_id {
                    prev.insert(wu.node.id, parent_id);
                }
                // NOTE(review): `seen` is only updated here on pop, so
                // duplicates may appear across batches; they are filtered at
                // receive time above.
                seen.insert(wu.node.id, 0.0);
                if wu.node.id == goal_sys.id {
                    found = true;
                    break;
                }
                workers.send(wu.clone()).unwrap();
            }
        }
        info!("Took: {}", format_duration(t_start.elapsed()));
        if !found {
            return Err(format!("No route from {} to {} found!", src_name, dst_name));
        }
        // Walk parent links back from the goal to the start.
        let mut v: Vec<System> = Vec::new();
        let mut curr_sys = goal_sys.clone();
        loop {
            v.push(curr_sys.clone());
            match prev.get(&curr_sys.id) {
                Some(sys) => {
                    curr_sys = self
                        .get_sys(*sys)?
                        .ok_or(format!("System id {} not found", sys))?
                }
                None => {
                    break;
                }
            }
        }
        v.reverse();
        Ok(v)
    }
    /// Single-threaded breadth-first search from `start_sys` to `goal_sys`,
    /// expanding one full depth layer at a time. When a beam width is set,
    /// each completed layer is sorted by heuristic distance to the goal and
    /// truncated before becoming the next frontier.
    ///
    /// NOTE(review): `max_dist` is squared into `_max_dist` but never used —
    /// confirm whether the distance cutoff was meant to be applied.
    #[cfg_attr(feature = "profiling", tracing::instrument)]
    fn route_bfs_serial(
        &self,
        start_sys: &System,
        goal_sys: &System,
        range: f32,
        max_dist: f32,
        beam_width: &BeamWidth,
    ) -> Result<Vec<System>, String> {
        if start_sys.id == goal_sys.id {
            return Ok(vec![goal_sys.clone()]);
        }
        // let mut log_file = BufWriter::new(File::create("route_log.txt").unwrap());
        let t_start = Instant::now();
        let _max_dist = max_dist * max_dist;
        info!("Running BFS");
        let src_name = start_sys.name.clone();
        let dst_name = goal_sys.name.clone();
        let d_total = dist(&start_sys.pos, &goal_sys.pos);
        let mut d_rem = d_total;
        let mut state = SearchState {
            mode: "BFS_serial".into(),
            depth: 0,
            queue_size: 0,
            d_rem,
            d_total,
            prc_done: 0.0,
            n_seen: 0,
            prc_seen: 0.0,
            from: src_name.clone(),
            to: dst_name.clone(),
            system: start_sys.name.clone(),
        };
        let total = self.tree.size() as f32;
        // prev: child id -> parent node, for path reconstruction.
        let mut prev = FxHashMap::default();
        let mut seen: FxHashMap<u32, f32> = FxHashMap::default();
        seen.reserve(self.tree.size());
        prev.reserve(self.tree.size());
        let mut depth = 0;
        let mut found = false;
        let mut t_last = Instant::now();
        // queue: current depth layer; queue_next: the layer being built.
        let mut queue: VecDeque<&TreeNode> = VecDeque::new();
        let mut queue_next: VecDeque<&TreeNode> = VecDeque::new();
        queue.reserve(100_000);
        queue_next.reserve(100_000);
        let start_node = start_sys.to_node();
        let end_node = goal_sys.to_node();
        // best: closest approach to the goal so far (for progress reporting).
        let mut best = (start_sys.distp(goal_sys), start_sys.to_node());
        queue.push_front(&start_node);
        seen.insert(start_sys.id, 0.0);
        while !found {
            while let Some(node) = queue.pop_front() {
                let _h_curr = heuristic(range, node, &end_node);
                let dist = node.distp(goal_sys);
                if dist < d_rem {
                    d_rem = dist;
                    best = (d_rem, *node);
                };
                // writeln!(log_file, "{},{}", node.id, depth).unwrap();
                if node.id == goal_sys.id {
                    queue.clear();
                    found = true;
                    break;
                }
                if t_last.elapsed().as_millis() > STATUS_INVERVAL {
                    // Periodic progress callback to the host application.
                    state.depth = depth;
                    state.queue_size = queue.len() + queue_next.len();
                    state.prc_done = ((d_total - d_rem) * 100f32) / d_total;
                    state.d_rem = d_rem;
                    state.n_seen = seen.len();
                    state.prc_seen = ((seen.len() * 100) as f32) / total;
                    if !queue.is_empty() {
                        let s = queue.get(0).unwrap().get(self)?.unwrap();
                        state.system = s.name.clone();
                    }
                    if let Some(cb) = &self.callback {
                        match cb(&state) {
                            Ok(_) => (),
                            Err(e) => {
                                return Err(format!("{:?}", e));
                            }
                        };
                    }
                    t_last = Instant::now();
                }
                // Expand unvisited, valid neighbours into the next layer.
                let jump_range = node.get_mult() * range;
                let valid_nbs = self
                    .neighbours(node, jump_range)
                    .filter(|nb| (self.valid(nb.id) || (nb.id == goal_sys.id)))
                    .filter(|nb| seen.insert(nb.id, 0.0).is_none());
                queue_next.extend(valid_nbs.map(|nb| {
                    prev.insert(nb.id, node);
                    nb
                }));
            }
            // let next_len=queue_next.len();
            // let mut queue_filt = Vec::new();
            // let best_d_goal = dist(&best.1.pos, &goal_sys.pos);
            // // info!("Queue: {} | Depth: {} | best_d_goal: {}",queue_next.len(),depth,best_d_goal);
            // while let Some(v) = queue_next.pop_front() {
            //     let d_current = dist(&v.pos, &goal_sys.pos);
            //     let diff = (d_current - best_d_goal).abs();
            //     // info!("diff: {} | in_range: {}",diff, diff<(range*8.0));
            //     if diff < (range * 32.0) {
            //         queue_filt.push(v);
            //     };
            //     // if dist(&v.pos,&best.1.pos)<(range*2.0) {
            //     //     queue_filt.push(v);
            //     // }
            // }
            if beam_width.is_set() {
                // Sort the completed layer by heuristic and keep only the
                // `bw` most promising nodes as the next frontier.
                let queue_filt = queue_next.make_contiguous();
                queue_filt.sort_by_key(|v| {
                    return F32(heuristic(range, v, &end_node));
                });
                queue.clear();
                let bw = beam_width.compute(queue_next.len());
                queue.extend(queue_next.drain(..).take(bw.max(1)));
                queue_next.clear();
            } else {
                queue.clear();
                queue.append(&mut queue_next);
            }
            if found {
                break;
            }
            if queue.is_empty() {
                break;
            }
            depth += 1;
        }
        info!("Took: {}", format_duration(t_start.elapsed()));
        if !found {
            return Err(format!(
                "No route from {} to {} found, remaining distance: {} Ly",
                src_name, dst_name, d_rem
            ));
        }
        // Walk parent links back from the goal to the start.
        let mut v: Vec<System> = Vec::new();
        let mut curr_sys = goal_sys.clone();
        loop {
            v.push(curr_sys.clone());
            match prev.get(&curr_sys.id) {
                Some(sys) => curr_sys = sys.get(self)?.unwrap(),
                None => {
                    break;
                }
            }
        }
        v.reverse();
        Ok(v)
    }
    /// Bidirectional BFS: expands one frontier forward from `start_sys` and
    /// one backward from `goal_sys` (via `neighbours_r`) out of a single
    /// interleaved queue, until the two seen-sets intersect. The route is
    /// then reconstructed by walking the shared `prev` map back from the
    /// goal.
    ///
    /// NOTE(review): the two directions write into the same `prev` map with
    /// opposite orientations (forward: `prev[nb] = node`; backward:
    /// `prev[node] = nb`) — confirm the walk below yields a contiguous route
    /// across the meeting point.
    #[cfg_attr(feature = "profiling", tracing::instrument)]
    pub fn route_bidir(
        &self,
        start_sys: &System,
        goal_sys: &System,
        range: f32,
    ) -> Result<Vec<System>, String> {
        if start_sys.id == goal_sys.id {
            return Ok(vec![goal_sys.clone()]);
        }
        let mut n: usize = 0;
        let _log_file = BufWriter::new(File::create("route_log.txt").unwrap());
        let t_start = Instant::now();
        info!("Running BiDir");
        let src_name = start_sys.name.clone();
        let dst_name = goal_sys.name.clone();
        let d_total = dist(&start_sys.pos, &goal_sys.pos);
        let d_rem = d_total;
        let _state = SearchState {
            mode: "BiDir".into(),
            depth: 0,
            queue_size: 0,
            d_rem,
            d_total,
            prc_done: 0.0,
            n_seen: 0,
            prc_seen: 0.0,
            from: src_name.clone(),
            to: dst_name.clone(),
            system: start_sys.name.clone(),
        };
        let _total = self.tree.size() as f32;
        let mut prev = FxHashMap::default();
        // Separate visited sets per direction; overlap means the frontiers
        // have met and a route exists.
        let mut seen_fwd: FxHashSet<u32> = FxHashSet::default();
        let mut seen_rev: FxHashSet<u32> = FxHashSet::default();
        let mut t_last = Instant::now();
        let mut queue: VecDeque<(usize, BiDirNode)> = VecDeque::new();
        queue.push_back((0, BiDirNode::Forward(start_sys.to_node())));
        queue.push_back((0, BiDirNode::Backwards(goal_sys.to_node())));
        seen_fwd.insert(start_sys.id);
        seen_rev.insert(goal_sys.id);
        while FxHashSet::is_disjoint(&seen_fwd, &seen_rev) {
            if queue.is_empty() {
                return Err(format!(
                    "No route from {} to {} found, remaining distance: {} Ly",
                    src_name, dst_name, d_rem
                ));
            }
            while let Some((depth, node)) = queue.pop_front() {
                if t_last.elapsed().as_millis() > STATUS_INVERVAL {
                    // Check for frontier intersection on the status tick.
                    if !FxHashSet::is_disjoint(&seen_fwd, &seen_rev) {
                        break;
                    }
                    info!("Q: {}, D: {}", queue.len(), depth);
                    t_last = Instant::now();
                }
                n += 1;
                let next_depth = depth + 1;
                match node {
                    BiDirNode::Forward(node) => {
                        let nbs =
                            self.neighbours(&node, node.get_mult() * range)
                                .filter_map(|nb| {
                                    if !seen_fwd.insert(nb.id) {
                                        return None;
                                    }
                                    prev.insert(nb.id, node.id);
                                    Some((next_depth, BiDirNode::Forward(*nb)))
                                });
                        queue.extend(nbs);
                    }
                    BiDirNode::Backwards(node) => {
                        let nbs = self.neighbours_r(&node, range).filter_map(|nb| {
                            if !seen_rev.insert(nb.id) {
                                return None;
                            }
                            prev.insert(node.id, nb.id);
                            Some((next_depth, BiDirNode::Backwards(*nb)))
                        });
                        queue.extend(nbs);
                    }
                };
            }
        }
        info!("Took: {}", format_duration(t_start.elapsed()));
        // Walk the prev chain back from the goal until no link remains.
        let mut v: Vec<System> = Vec::new();
        let mut curr_sys = goal_sys.clone().id;
        v.push(self.get_sys(curr_sys)?.unwrap());
        while let Some(&prev_sys_id) = prev.get(&curr_sys) {
            v.push(self.get_sys(prev_sys_id)?.unwrap());
            curr_sys = prev_sys_id;
        }
        v.reverse();
        Ok(v)
    }
}
impl Router {
    /// Resolves `sys_ids` to full systems and routes through them in order
    /// via `multiroute`.
    ///
    /// Either `range` must be supplied, or `mode` must carry a ship — in the
    /// ship case `0.0` is passed on as a placeholder jump range and the real
    /// range is derived from the ship downstream.
    #[cfg_attr(feature = "profiling", tracing::instrument)]
    pub fn compute_route(
        &mut self,
        sys_ids: &[u32],
        range: Option<f32>,
        mode: ModeConfig,
        num_workers: usize,
    ) -> Result<Vec<System>, String> {
        // Without an explicit range we can only proceed in ship mode.
        let has_ship = range.is_none() && matches!(mode, ModeConfig::Ship { .. });
        if range.is_none() && !has_ship {
            return Err(
                "Need either a jump range or a ship to compute a route with!".to_owned(),
            );
        }
        let range = if has_ship {
            0.0
        } else {
            range.ok_or("Dynamic range calculation is not yet implemented, sorry!")?
        };
        // Resolve every waypoint id in a single backing-file pass, keeping
        // the caller-supplied hop order.
        let id_map = self.get_systems_by_ids(sys_ids)?;
        let hops: Vec<System> = sys_ids
            .iter()
            .map(|id| id_map.get(id).unwrap())
            .cloned()
            .collect();
        self.multiroute(&hops, range, mode, num_workers)
    }
}