2022-02-23 21:45:59 +00:00
|
|
|
//! Route computation functions using various graph search algorithms
|
|
|
|
use crate::common::{
|
|
|
|
dist, dist2, distm, fcmp, heuristic, BeamWidth, MinFHeap, System, TreeNode, F32,
|
|
|
|
};
|
|
|
|
#[cfg(feature = "profiling")]
|
|
|
|
use crate::profiling::{span, Level};
|
2020-06-16 13:38:31 +00:00
|
|
|
use crate::ship::Ship;
|
2022-02-23 21:45:59 +00:00
|
|
|
|
|
|
|
use crossbeam_channel::{bounded, unbounded, Receiver, SendError, Sender};
|
2022-06-14 21:00:50 +00:00
|
|
|
use dashmap::{DashMap, DashSet};
|
2020-03-28 13:53:52 +00:00
|
|
|
use derivative::Derivative;
|
|
|
|
use dict_derive::IntoPyObject;
|
2022-02-23 21:45:59 +00:00
|
|
|
|
2020-02-05 23:24:24 +00:00
|
|
|
use humantime::format_duration;
|
2022-02-23 21:45:59 +00:00
|
|
|
|
|
|
|
use itertools::Itertools;
|
|
|
|
use log::*;
|
2020-02-05 23:24:24 +00:00
|
|
|
use permutohedron::LexicalPermutation;
|
2022-02-23 21:45:59 +00:00
|
|
|
|
2020-02-05 23:24:24 +00:00
|
|
|
use pyo3::prelude::*;
|
2022-02-23 21:45:59 +00:00
|
|
|
use pythonize::depythonize;
|
2022-06-14 21:00:50 +00:00
|
|
|
use rayon::prelude::*;
|
|
|
|
use rayon::ThreadPoolBuilder;
|
2020-02-05 23:24:24 +00:00
|
|
|
use rstar::{PointDistance, RStarInsertionStrategy, RTree, RTreeObject, RTreeParams, AABB};
|
2022-02-23 21:45:59 +00:00
|
|
|
use rustc_hash::{FxHashMap, FxHashSet};
|
|
|
|
use serde::{Deserialize, Serialize};
|
2020-02-05 23:24:24 +00:00
|
|
|
use sha3::{Digest, Sha3_256};
|
2022-02-23 21:45:59 +00:00
|
|
|
use std::convert::TryFrom;
|
2020-02-05 23:24:24 +00:00
|
|
|
use std::fs::File;
|
|
|
|
use std::hash::{Hash, Hasher};
|
|
|
|
use std::io::{BufReader, BufWriter, Write};
|
|
|
|
use std::path::PathBuf;
|
2022-06-14 21:00:50 +00:00
|
|
|
use std::sync::atomic::{AtomicUsize, Ordering};
|
2020-02-05 23:24:24 +00:00
|
|
|
use std::sync::{Arc, Mutex};
|
|
|
|
use std::thread;
|
|
|
|
use std::thread::JoinHandle;
|
2022-06-14 21:00:50 +00:00
|
|
|
use std::time::{Duration, Instant};
|
2022-02-23 21:45:59 +00:00
|
|
|
use std::{
|
|
|
|
collections::{BinaryHeap, VecDeque},
|
|
|
|
path::Path,
|
|
|
|
};
|
|
|
|
|
|
|
|
/// Progress-callback invoked with a [`SearchState`] snapshot during a search;
/// runs on the search thread, so it must be `Send`. Returns a Python object
/// (return value is presumably inspected by the caller — TODO confirm usage).
type RouterCallback = Box<dyn Fn(&SearchState) -> PyResult<PyObject> + Send>;
|
2020-02-05 23:24:24 +00:00
|
|
|
|
2020-03-28 13:53:52 +00:00
|
|
|
/// Minimum time between progress/status updates, in milliseconds.
/// NOTE(review): name is misspelled ("INVERVAL" → "INTERVAL"); kept as-is
/// because it may be referenced elsewhere in the file.
const STATUS_INVERVAL: u128 = 5000; //ms
|
2020-02-05 23:24:24 +00:00
|
|
|
|
2022-02-23 21:45:59 +00:00
|
|
|
/// Optimization target for ship-aware routing.
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub enum ShipMode {
    /// Minimize total fuel consumption.
    Fuel,
    /// Minimize the number of jumps.
    Jumps,
}
|
|
|
|
|
|
|
|
impl Default for ShipMode {
|
|
|
|
fn default() -> Self {
|
|
|
|
ShipMode::Fuel
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// On-disk representation of a precomputed route tree.
#[derive(Deserialize, Serialize)]
enum PrecompTree {
    /// A complete parent map for root system `id`.
    Full { id: u32, map: Vec<u32> },
    /// A delta against the map of `parent`: only the `(node, new_parent)`
    /// pairs that differ are stored.
    Partial { parent: u32, diff: Vec<(u32, u32)> },
}
|
|
|
|
/// Priority-queue entry for ship-aware route search.
#[derive(Debug)]
struct ShipRouteState {
    // Accumulated cost (fuel or composite score) so far.
    cost: f32,
    // Fuel remaining in the tank at this node.
    fuel: f32,
    // The system this state sits at.
    node: TreeNode,
    // Number of refuel stops taken so far.
    refuels: usize,
    // Number of jumps from the start.
    depth: usize,
    // Distance metric for this state (exact meaning depends on search mode).
    dist: f32,
    // Which objective this state is ranked by; states of different modes
    // must never be mixed in one queue (Ord panics on mismatch).
    mode: ShipMode,
}
|
|
|
|
|
|
|
|
impl Ord for ShipRouteState {
|
|
|
|
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
|
|
|
|
if self.mode != other.mode {
|
|
|
|
panic!(
|
|
|
|
"Trying to compare incompatible states: {:?} and {:?}",
|
|
|
|
self.mode, other.mode
|
|
|
|
);
|
|
|
|
};
|
|
|
|
match self.mode {
|
|
|
|
ShipMode::Fuel => {
|
|
|
|
// (cost,refuels)
|
|
|
|
fcmp(self.cost, other.cost).then(self.refuels.cmp(&other.refuels))
|
|
|
|
}
|
|
|
|
ShipMode::Jumps => {
|
|
|
|
// (depth,refules,cost)
|
|
|
|
self.depth
|
|
|
|
.cmp(&other.depth)
|
|
|
|
.then(self.refuels.cmp(&other.refuels))
|
|
|
|
.then(fcmp(self.cost, other.cost))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
.reverse()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl PartialOrd for ShipRouteState {
    /// Delegates to the total order defined by `Ord` (never returns `None`).
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
|
|
|
|
|
|
|
|
impl PartialEq for ShipRouteState {
|
|
|
|
fn eq(&self, other: &Self) -> bool {
|
|
|
|
self.node.id == other.node.id && self.depth == other.depth && self.refuels == other.refuels
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// `eq` compares only integer fields, so equality is reflexive and total.
impl Eq for ShipRouteState {}
|
|
|
|
|
2020-03-28 13:53:52 +00:00
|
|
|
/// Tunable weights for scoring candidate nodes along a route.
struct Weight {
    // Weight applied to the (normalized) distance travelled from the start.
    dist_from_start: f32,
    // Weight applied to the (normalized) distance remaining to the goal.
    dist_to_goal: f32,
    // Extra attractor/repulsor points: (weight, position) pairs.
    dist_to_point: Vec<(f32, [f32; 3])>,
}
|
|
|
|
|
|
|
|
impl Weight {
|
|
|
|
fn calc(&self, node: &TreeNode, dst: &TreeNode, src: &TreeNode) -> f32 {
|
2020-06-16 13:38:31 +00:00
|
|
|
let d_total = dist(&src.pos, &dst.pos);
|
|
|
|
let d_start = (dist(&node.pos, &src.pos) / d_total) * self.dist_from_start;
|
|
|
|
let d_goal = (dist(&node.pos, &dst.pos) / d_total) * self.dist_to_goal;
|
|
|
|
let points: f32 = self
|
|
|
|
.dist_to_point
|
|
|
|
.iter()
|
|
|
|
.map(|&(f, p)| dist(&p, &node.pos) * f)
|
|
|
|
.sum();
|
|
|
|
return d_start + d_goal + points;
|
2020-03-28 13:53:52 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Progress snapshot handed to the Python-side status callback.
#[derive(Debug, Clone, IntoPyObject)]
pub struct SearchState {
    // Human-readable name of the active search mode.
    pub mode: String,
    // System currently being expanded.
    pub system: String,
    // Route start system name.
    pub from: String,
    // Route goal system name.
    pub to: String,
    // Current search depth (jumps from the start).
    pub depth: usize,
    // Number of queued work items.
    pub queue_size: usize,
    // Remaining straight-line distance to the goal.
    pub d_rem: f32,
    // Total straight-line distance of the route.
    pub d_total: f32,
    // Estimated completion percentage by distance.
    pub prc_done: f32,
    // Number of systems visited so far.
    pub n_seen: usize,
    // Percentage of all systems visited.
    pub prc_seen: f32,
}
|
|
|
|
|
2022-02-23 21:45:59 +00:00
|
|
|
/// Serde default for `PyModeConfig::greedyness`: a 50/50 blend between
/// greedy and breadth-first behaviour.
fn default_greedyness() -> f32 {
    0.5_f32
}
|
|
|
|
/// Untyped, Python-facing search configuration; converted into the strongly
/// typed [`ModeConfig`] via `TryFrom`.
/// NOTE(review): "greedyness" spelling is part of the serde/Python interface
/// and must not be corrected without a compatibility alias.
#[derive(Debug, Deserialize)]
pub struct PyModeConfig {
    // Mode selector string, e.g. "bfs", "a_star", "ship" (see TryFrom impl).
    #[serde(default)]
    pub mode: String,
    // Beam width for beam-limited searches.
    #[serde(default)]
    pub beam_width: BeamWidth,
    // A* greediness in [0, 1]; defaults to 0.5 when omitted.
    #[serde(default = "default_greedyness")]
    pub greedyness: f32,
    // Ship definition; required for mode == "ship".
    #[serde(default)]
    pub ship: Option<Ship>,
    // A*: optimize for distance instead of jump count.
    #[serde(default)]
    pub use_distance: bool,
    // Ship objective: "jumps" or "fuel".
    #[serde(default)]
    pub ship_mode: String,
}
|
|
|
|
|
|
|
|
/// Validated search-mode configuration used by the router.
#[derive(Debug)]
pub enum ModeConfig {
    /// Beam-limited breadth-first search.
    BreadthFirst {
        beam_width: BeamWidth,
    },
    /// Depth-first search.
    DepthFirst,
    IncrementalBroadening, // TODO: implement IncrementalBroadening
    /// Bi-directional search from both endpoints.
    BiDir,
    /// A* with a tunable greedy/uniform blend.
    AStar {
        // In (0, 1); values outside are rewritten to Greedy/BreadthFirst
        // during conversion from PyModeConfig.
        greedyness: f32,
        beam_width: BeamWidth,
        // Optimize for total distance instead of jump count.
        use_distance: bool,
    },
    /// Dijkstra shortest path.
    Dijkstra,
    /// Pure greedy best-first search.
    Greedy,
    /// Ship-aware search with fuel model.
    Ship {
        mode: ShipMode,
        ship: Ship,
    },
}
|
|
|
|
|
2022-02-23 21:45:59 +00:00
|
|
|
impl std::default::Default for PyModeConfig {
|
|
|
|
fn default() -> Self {
|
|
|
|
Self {
|
|
|
|
mode: "bfs".to_string(),
|
|
|
|
beam_width: BeamWidth::Absolute(8192),
|
|
|
|
greedyness: 0.0,
|
|
|
|
ship: None,
|
|
|
|
use_distance: false,
|
|
|
|
ship_mode: "jumps".to_string(),
|
|
|
|
}
|
|
|
|
}
|
2020-02-05 23:24:24 +00:00
|
|
|
}
|
|
|
|
|
2022-02-23 21:45:59 +00:00
|
|
|
impl std::fmt::Display for ModeConfig {
|
|
|
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
|
|
match self {
|
|
|
|
ModeConfig::BreadthFirst { beam_width } => {
|
|
|
|
write!(f, "Bread-first search, beam width: {}", beam_width)
|
|
|
|
}
|
|
|
|
ModeConfig::DepthFirst => write!(f, "Depth-first search"),
|
|
|
|
ModeConfig::IncrementalBroadening => write!(f, "Incrementally broadening beam search"),
|
|
|
|
ModeConfig::BiDir => write!(f, "Bi-directional search"),
|
|
|
|
ModeConfig::AStar {
|
|
|
|
greedyness,
|
|
|
|
beam_width,
|
|
|
|
use_distance,
|
|
|
|
} => {
|
|
|
|
if *use_distance {
|
|
|
|
write!(
|
|
|
|
f,
|
|
|
|
"A*-Search, least distance, greedyness: {} %, beam width: {}",
|
|
|
|
greedyness, beam_width
|
|
|
|
)
|
|
|
|
} else {
|
|
|
|
write!(
|
|
|
|
f,
|
|
|
|
"A*-Search, least jumps, greedyness: {} %, beam width: {}",
|
|
|
|
greedyness, beam_width
|
|
|
|
)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
ModeConfig::Dijkstra => write!(f, "Dijkstra shortest path"),
|
|
|
|
ModeConfig::Greedy => write!(f, "Greedy search"),
|
|
|
|
ModeConfig::Ship { mode, .. } => match mode {
|
|
|
|
ShipMode::Fuel => write!(f, "Ship: Least fuel consumption"),
|
|
|
|
ShipMode::Jumps => write!(f, "Ship: Least number of jumps"),
|
|
|
|
},
|
2020-02-05 23:24:24 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-02-23 21:45:59 +00:00
|
|
|
impl std::default::Default for ModeConfig {
    /// Default search: breadth-first with an absolute beam width of 8192
    /// (matches `PyModeConfig::default()`).
    fn default() -> Self {
        Self::BreadthFirst {
            beam_width: BeamWidth::Absolute(8192),
        }
    }
}
|
|
|
|
|
2022-02-23 21:45:59 +00:00
|
|
|
impl FromPyObject<'_> for PyModeConfig {
    /// Extracts a `PyModeConfig` from a Python object (typically a dict) via
    /// serde `depythonize`; any conversion failure becomes a `RuntimeError`.
    fn extract(ob: &PyAny) -> PyResult<Self> {
        depythonize(ob).map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(format!("{}", e)))
    }
}
|
|
|
|
|
2022-02-23 21:45:59 +00:00
|
|
|
impl TryFrom<PyModeConfig> for ModeConfig {
    type Error = pyo3::PyErr;

    /// Validates the Python-facing config and converts it into a typed
    /// `ModeConfig`, normalizing degenerate A* greedyness values.
    ///
    /// # Errors
    /// Returns a Python `RuntimeError` for unknown mode / ship-mode strings
    /// or a missing ship configuration in "ship" mode.
    ///
    /// NOTE(review): there is no "greedy" mode string — `ModeConfig::Greedy`
    /// is only reachable via `a_star` with greedyness >= 1.0. Confirm this is
    /// intentional.
    fn try_from(value: PyModeConfig) -> PyResult<Self> {
        let ret = match value.mode.as_str() {
            "bi_dir" => ModeConfig::BiDir,
            "incremental_broadening" => ModeConfig::IncrementalBroadening,
            "dijkstra" => ModeConfig::Dijkstra,
            "a_star" | "astar" => ModeConfig::AStar {
                greedyness: value.greedyness,
                beam_width: value.beam_width,
                use_distance: value.use_distance,
            },
            "dfs" | "depth_first" => ModeConfig::DepthFirst,
            "bfs" | "breadth_first" => ModeConfig::BreadthFirst {
                beam_width: value.beam_width,
            },
            "ship" => {
                // Resolve the ship objective first so an invalid string fails
                // before the (more expensive) ship-config check.
                let ship_mode = match value.ship_mode.as_str() {
                    "jumps" => ShipMode::Jumps,
                    "fuel" => ShipMode::Fuel,
                    other => {
                        return Err(pyo3::exceptions::PyRuntimeError::new_err(format!(
                            "invalid ship mode: {}",
                            other
                        )))
                    }
                };
                ModeConfig::Ship {
                    ship: value.ship.ok_or_else(|| {
                        pyo3::exceptions::PyRuntimeError::new_err(
                            "missing ship config!".to_string(),
                        )
                    })?,
                    mode: ship_mode,
                }
            }
            other => {
                return Err(pyo3::exceptions::PyRuntimeError::new_err(format!(
                    "invalid mode: {}",
                    other
                )))
            }
        };
        // A* with degenerate greedyness collapses into a simpler mode:
        // >= 1.0 is pure greedy search, <= 0.0 is plain breadth-first.
        if let ModeConfig::AStar {
            greedyness,
            beam_width,
            use_distance: _,
        } = &ret
        {
            if *greedyness >= 1.0 {
                warn!("greedyness {}>=1.0, switching to greedy search", greedyness);
                return Ok(ModeConfig::Greedy);
            }
            if *greedyness <= 0.0 {
                warn!(
                    "greedyness {}<=0.0, switching to breadth-first search",
                    greedyness
                );
                return Ok(ModeConfig::BreadthFirst {
                    beam_width: beam_width.clone(),
                });
            }
        }
        return Ok(ret);
    }
}
|
2022-02-23 21:45:59 +00:00
|
|
|
/// What to precompute when building route data.
#[derive(Debug)]
#[allow(non_camel_case_types)]
pub enum PrecomputeMode {
    /// Precompute for every system.
    Full,
    /// Precompute routes originating from the given system id.
    Route_From(u32),
    /// Precompute routes terminating at the given system id.
    Route_To(u32),
    /// No precomputation.
    None,
}
|
2020-02-05 23:24:24 +00:00
|
|
|
|
|
|
|
impl System {
|
|
|
|
pub fn dist2(&self, p: &[f32; 3]) -> f32 {
|
|
|
|
dist2(&self.pos, p)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn distp(&self, p: &System) -> f32 {
|
|
|
|
dist(&self.pos, &p.pos)
|
|
|
|
}
|
|
|
|
pub fn distp2(&self, p: &System) -> f32 {
|
|
|
|
self.dist2(&p.pos)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl TreeNode {
    /// Squared Euclidean distance from this node to an arbitrary point.
    pub fn dist2(&self, p: &[f32; 3]) -> f32 {
        dist2(&self.pos, p)
    }

    /// Euclidean distance from this node to a system.
    pub fn distp(&self, p: &System) -> f32 {
        dist(&self.pos, &p.pos)
    }
}
|
|
|
|
|
|
|
|
impl PartialEq for System {
    /// Systems are identified solely by their id (consistent with `Hash`).
    fn eq(&self, other: &Self) -> bool {
        self.id == other.id
    }
}

// Id-based equality is total, so Eq is sound.
impl Eq for System {}
|
|
|
|
|
|
|
|
impl Hash for System {
    /// Hashes only the id, matching the id-based `PartialEq` impl.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.id.hash(state);
    }
}
|
|
|
|
|
|
|
|
impl RTreeObject for TreeNode {
    type Envelope = AABB<[f32; 3]>;

    /// Nodes are points, so the bounding box degenerates to the position.
    fn envelope(&self) -> Self::Envelope {
        AABB::from_point(self.pos)
    }
}
|
|
|
|
|
|
|
|
impl PointDistance for TreeNode {
    /// rstar uses squared distances to avoid sqrt in tree queries.
    fn distance_2(&self, point: &[f32; 3]) -> f32 {
        self.dist2(point)
    }
}
|
|
|
|
|
2022-02-23 21:45:59 +00:00
|
|
|
fn hash_file(path: &Path) -> Vec<u8> {
|
2020-02-05 23:24:24 +00:00
|
|
|
let mut hash_reader = BufReader::new(File::open(path).unwrap());
|
|
|
|
let mut hasher = Sha3_256::new();
|
|
|
|
std::io::copy(&mut hash_reader, &mut hasher).unwrap();
|
2020-06-16 13:38:31 +00:00
|
|
|
hasher.finalize().iter().copied().collect()
|
2020-02-05 23:24:24 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Random-access reader for the systems CSV: maps a system id to the byte
/// offset of its line so single records can be fetched without a full scan.
pub struct LineCache {
    // cache[id] = absolute byte offset of the CSV line for system `id`.
    cache: Vec<u64>,
    // Seekable CSV reader over the systems file.
    reader: csv::Reader<File>,
}
|
|
|
|
|
|
|
|
impl LineCache {
|
2022-02-23 21:45:59 +00:00
|
|
|
pub fn new(path: &Path) -> Result<Arc<Mutex<Self>>, String> {
|
|
|
|
Ok(Arc::new(Mutex::new(Self::create(path)?)))
|
|
|
|
}
|
|
|
|
pub fn create(path: &Path) -> Result<Self, String> {
|
|
|
|
use crate::common::build_index;
|
|
|
|
let stars_hash = hash_file(path);
|
2020-02-05 23:24:24 +00:00
|
|
|
let idx_path = path.with_extension("idx");
|
2020-03-28 13:53:52 +00:00
|
|
|
if !idx_path.exists() {
|
2022-02-23 21:45:59 +00:00
|
|
|
warn!("No index found for {:?}, building...", path);
|
|
|
|
build_index(path).map_err(|e| format!("Error creating index for {:?}: {}", path, e))?;
|
2020-03-28 13:53:52 +00:00
|
|
|
}
|
2022-02-23 21:45:59 +00:00
|
|
|
let (hash, cache): (Vec<u8>, Vec<u8>) = bincode::deserialize_from(&mut BufReader::new(
|
2020-03-28 13:53:52 +00:00
|
|
|
File::open(idx_path)
|
2022-02-23 21:45:59 +00:00
|
|
|
.map_err(|e| format!("Error opening index for {:?}: {}", path, e))?,
|
2020-03-28 13:53:52 +00:00
|
|
|
))
|
2022-02-23 21:45:59 +00:00
|
|
|
.map_err(|e| format!("Reading index for {:?}: {}", path, e))?;
|
|
|
|
if hash != stars_hash {
|
|
|
|
return Err(format!("Missmatched hash for {:?}", path));
|
|
|
|
}
|
|
|
|
let reader = csv::ReaderBuilder::new()
|
|
|
|
.has_headers(false)
|
|
|
|
.from_path(path)
|
|
|
|
.map_err(|e| format!("Error opening csv file {:?}: {}", path, e))?;
|
|
|
|
let cache: Vec<u64> = cache
|
|
|
|
.iter()
|
|
|
|
.scan(0u64, |s, &v| {
|
|
|
|
*s += v as u64;
|
|
|
|
Some(*s)
|
|
|
|
})
|
|
|
|
.collect();
|
|
|
|
Ok(Self { reader, cache })
|
|
|
|
}
|
|
|
|
fn read_sys(&mut self) -> Result<Option<System>, String> {
|
|
|
|
self.reader
|
|
|
|
.deserialize()
|
|
|
|
.next()
|
|
|
|
.transpose()
|
|
|
|
.map_err(|e| format!("{}", e))
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn get(&mut self, id: u32) -> Result<Option<System>, String> {
|
2020-02-05 23:24:24 +00:00
|
|
|
let mut pos = csv::Position::new();
|
|
|
|
pos.set_byte(self.cache[id as usize]);
|
2022-02-23 21:45:59 +00:00
|
|
|
match self.reader.seek(pos) {
|
|
|
|
Ok(_) => self.read_sys(),
|
|
|
|
Err(_) => Ok(None),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn len(&self) -> usize {
|
|
|
|
self.cache.len()
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn is_empty(&self) -> bool {
|
|
|
|
self.cache.is_empty()
|
2020-02-05 23:24:24 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// R*-tree tuning: large nodes trade deeper trees for better cache behaviour
/// on bulk range queries over millions of points.
pub struct LargeNodeParameters;
impl RTreeParams for LargeNodeParameters {
    const MIN_SIZE: usize = 200;
    const MAX_SIZE: usize = 400;
    const REINSERTION_COUNT: usize = 100;
    type DefaultInsertionStrategy = RStarInsertionStrategy;
}

/// R*-tree specialised with the large-node parameters above.
pub type LargeNodeRTree<T> = RTree<T, LargeNodeParameters>;
|
|
|
|
|
|
|
|
/// One node-expansion job handed to the worker pool.
#[derive(Debug, Clone)]
struct WorkUnit {
    // The node to expand.
    node: TreeNode,
    // Search depth of `node` (children get depth + 1).
    depth: usize,
    // Id of the node this one was reached from; None for the root.
    parent_id: Option<u32>,
    // Base jump range; scaled by the node's multiplier during expansion.
    range: f32,
}
|
|
|
|
|
|
|
|
/// A pool of neighbour-expansion worker threads, or no pool at all
/// (single-threaded operation).
#[derive(Debug)]
enum WorkerSet {
    /// No workers: callers expand nodes inline.
    Empty,
    Workers {
        // Join handles for the spawned worker threads.
        handles: Vec<JoinHandle<()>>,
        // Job channel; `None` is the shutdown sentinel.
        tx: Sender<Option<WorkUnit>>,
        // Result channel carrying each job's expanded neighbours.
        rx: Receiver<Vec<WorkUnit>>,
    },
}
|
|
|
|
|
|
|
|
impl WorkerSet {
    /// Spawns `num_workers` threads that expand nodes against `tree`.
    /// Returns `WorkerSet::Empty` for zero workers.
    fn new(tree: Arc<LargeNodeRTree<TreeNode>>, num_workers: usize) -> Self {
        if num_workers == 0 {
            return WorkerSet::Empty;
        }
        // Jobs are unbounded; results are bounded to backpressure producers.
        let (jobs_tx, jobs_rx) = unbounded();
        let (result_tx, result_rx) = bounded(100_000);
        let handles = (0..num_workers)
            .map(|_| {
                thread::spawn({
                    let rx = jobs_rx.clone();
                    let tx = result_tx.clone();
                    let tree = tree.clone();
                    move || {
                        Self::work(&tree, rx, tx);
                    }
                })
            })
            .collect();
        return WorkerSet::Workers {
            handles,
            tx: jobs_tx,
            rx: result_rx,
        };
    }

    /// Worker loop: for each job, finds all tree nodes within the unit's
    /// (multiplier-scaled) range and sends them back as child work units.
    /// Exits when the job channel closes or a `None` sentinel arrives.
    fn work(
        tree: &LargeNodeRTree<TreeNode>,
        rx: Receiver<Option<WorkUnit>>,
        tx: Sender<Vec<WorkUnit>>,
    ) {
        #[cfg(feature = "profiling")]
        let span = span!(Level::INFO, "nb_worker");
        #[cfg(feature = "profiling")]
        let guard = span.enter();
        while let Ok(Some(unit)) = rx.recv() {
            // Effective range is scaled by the node's jump-range multiplier.
            let range = unit.range * unit.node.get_mult();
            let res = tree
                .locate_within_distance(unit.node.pos, range * range)
                .cloned()
                .map(|nb| WorkUnit {
                    node: nb,
                    depth: unit.depth + 1,
                    parent_id: Some(unit.node.id),
                    range: unit.range,
                })
                .collect();
            tx.send(res).unwrap();
        }
        drop(tx);
        #[cfg(feature = "profiling")]
        drop(guard);
    }

    /// Replaces this set with a freshly spawned one of `num` workers and
    /// shuts down the old workers cleanly.
    fn resize(&mut self, tree: Arc<LargeNodeRTree<TreeNode>>, num: usize) -> Result<(), String> {
        let mut new_set = WorkerSet::new(tree, num);
        std::mem::swap(self, &mut new_set);
        new_set.close()?;
        Ok(())
    }

    // fn replace(self, tree: Arc<LargeNodeRTree<TreeNode>>) -> Result<Self, String> {
    //     let num=self.num();
    //     return self.resize(tree.clone(),num);
    // }

    /// Drains both channels, sends one shutdown sentinel per worker, joins
    /// all threads, and logs how long the teardown took.
    ///
    /// NOTE(review): the drain loop below busy-spins until both channels are
    /// empty — acceptable for a short teardown, but worth confirming.
    fn close(self) -> Result<(), String> {
        #[cfg(feature = "profiling")]
        let _span = span!(Level::INFO, "nb_worker:close");
        if let WorkerSet::Workers {
            mut handles,
            tx,
            rx,
        } = self
        {
            let t_start = Instant::now();
            loop {
                if rx.is_empty() && tx.is_empty() {
                    break;
                }
                // Discard pending results so workers don't block on send.
                rx.try_iter().for_each(|_| {});
            }
            // One None sentinel per worker terminates each work() loop.
            for _ in &handles {
                match tx.send(None) {
                    Ok(_) => {}
                    Err(e) => {
                        return Err(format!("{:?}", e));
                    }
                }
            }
            drop(tx);
            while let Some(handle) = handles.pop() {
                handle.join().unwrap();
            }
            drop(rx);
            info!(
                "workerset cleared in {}",
                format_duration(t_start.elapsed())
            );
        }
        return Ok(());
    }

    /// Total number of queued jobs plus pending results.
    fn queue_size(&self) -> usize {
        match self {
            WorkerSet::Empty => 0,
            WorkerSet::Workers { rx, tx, .. } => tx.len() + rx.len(),
        }
    }

    /// True when no jobs or results are in flight.
    fn queue_empty(&self) -> bool {
        return self.queue_size() == 0;
    }

    /// Submits one work unit to the pool.
    ///
    /// # Panics
    /// Panics when called on `WorkerSet::Empty`.
    fn send(&self, wu: WorkUnit) -> Result<(), SendError<Option<WorkUnit>>> {
        match self {
            WorkerSet::Empty => {
                panic!("send() on empty WorkerSet");
            }
            WorkerSet::Workers { tx, .. } => {
                return tx.send(Some(wu));
            }
        }
    }

    /// Effective parallelism: worker count, or 1 for the inline (Empty) case
    /// so callers can divide work without a zero check.
    fn num(&self) -> usize {
        match self {
            WorkerSet::Empty => 1,
            WorkerSet::Workers { handles, .. } => handles.len(),
        }
    }

    /// True when there are no worker threads.
    fn is_empty(&self) -> bool {
        match self {
            WorkerSet::Empty => true,
            WorkerSet::Workers { handles, .. } => handles.len() == 0,
        }
    }

    /// Blocks for the next batch of expansion results; `Empty` pools yield an
    /// empty batch immediately.
    fn recv(&self) -> Result<Vec<WorkUnit>, String> {
        match self {
            WorkerSet::Empty => Ok(vec![]),
            WorkerSet::Workers { rx, .. } => rx.recv().map_err(|e| format!("{:?}", e)),
        }
    }

    // impl Iterator<Item = &TreeNode>

    // fn join(mut self) -> thread::Result<()> {
    //     drop(self.tx);
    //     drop(self.rx);
    //     let ret: thread::Result<Vec<_>> = self.handles.drain(..).map(|v| v.join()).collect();
    //     ret?;
    //     return Ok(());
    // }
}
|
|
|
|
|
2022-02-23 21:45:59 +00:00
|
|
|
/// Frontier entry for bi-directional search, tagged with the direction the
/// node was discovered from.
#[derive(Debug, PartialOrd, Ord, PartialEq, Eq, Clone, Copy)]
enum BiDirNode {
    /// Reached from the start side.
    Forward(TreeNode),
    /// Reached from the goal side.
    Backwards(TreeNode),
}
|
|
|
|
|
2020-03-28 13:53:52 +00:00
|
|
|
/// Route computation engine: holds the spatial index over all systems plus
/// caches, worker pool, and optional Python callback/filter hooks.
/// Large members are excluded from Debug output via `derivative`.
#[derive(Derivative)]
#[derivative(Debug)]
pub struct Router {
    // Spatial index over all loaded systems.
    #[derivative(Debug = "ignore")]
    tree: Arc<LargeNodeRTree<TreeNode>>,
    // Ids of systems valid as route stops (scoopable star or boosted).
    #[derivative(Debug = "ignore")]
    scoopable: FxHashSet<u32>,
    // Precomputed parent map (child id -> parent id), if loaded.
    #[derivative(Debug = "ignore")]
    pub route_tree: Option<FxHashMap<u32, u32>>,

    // Shared line cache for resolving ids to full System records.
    #[derivative(Debug = "ignore")]
    pub cache: Option<Arc<Mutex<LineCache>>>,

    // Path of the currently loaded systems CSV ("" when nothing is loaded).
    pub path: PathBuf,
    // Restrict to primary stars only.
    pub primary_only: bool,

    // Neighbour-expansion worker pool.
    #[derivative(Debug = "ignore")]
    workers: WorkerSet,
    // Optional Python progress callback.
    #[derivative(Debug = "ignore")]
    pub callback: Option<RouterCallback>,
    // Optional user-supplied node filter expression.
    filter: Option<eval::Function>,
    // Optional user-supplied node weight expression.
    weight: Option<eval::Function>,
}
|
|
|
|
|
2022-02-23 21:45:59 +00:00
|
|
|
impl Default for Router {
|
|
|
|
fn default() -> Self {
|
2020-06-16 13:38:31 +00:00
|
|
|
Self {
|
2020-03-28 13:53:52 +00:00
|
|
|
tree: Arc::new(LargeNodeRTree::default()),
|
2022-02-23 21:45:59 +00:00
|
|
|
scoopable: FxHashSet::default(),
|
2020-03-28 13:53:52 +00:00
|
|
|
route_tree: None,
|
|
|
|
cache: None,
|
2022-02-23 21:45:59 +00:00
|
|
|
callback: None,
|
2020-06-16 13:38:31 +00:00
|
|
|
primary_only: false,
|
2020-03-28 13:53:52 +00:00
|
|
|
workers: WorkerSet::Empty,
|
|
|
|
path: PathBuf::from(""),
|
2022-02-23 21:45:59 +00:00
|
|
|
filter: None,
|
|
|
|
weight: None,
|
2020-06-16 13:38:31 +00:00
|
|
|
}
|
2020-03-28 13:53:52 +00:00
|
|
|
}
|
2022-02-23 21:45:59 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
impl Router {
|
|
|
|
    /// Creates an empty router; call `load` before routing.
    pub fn new() -> Self {
        Self::default()
    }
|
|
|
|
|
|
|
|
    /// Installs the Python progress callback invoked during searches.
    pub fn set_callback(&mut self, callback: RouterCallback) {
        self.callback = Some(callback);
    }
|
2020-03-28 13:53:52 +00:00
|
|
|
|
2022-02-23 21:45:59 +00:00
|
|
|
    /// Releases the loaded dataset: clears the path and replaces the spatial
    /// index with an empty tree.
    pub fn unload(&mut self) {
        self.path = PathBuf::from("");
        self.tree = Arc::new(LargeNodeRTree::bulk_load_with_params(vec![]));
    }
|
|
|
|
|
|
|
|
    /// Loads the systems CSV at `path`: deserializes all systems, records
    /// which ids are scoopable/boosted, builds the R*-tree, and sets up the
    /// line cache. No-op when `path` is already loaded.
    ///
    /// # Errors
    /// Returns a description when the CSV cannot be opened or a record fails
    /// to deserialize. A cache-creation failure is only logged, not fatal.
    #[cfg_attr(feature = "profiling", tracing::instrument)]
    pub fn load(&mut self, path: &Path) -> Result<(), String> {
        if self.path == path {
            return Ok(());
        }
        let mut scoopable = FxHashSet::default();
        let mut reader = match csv::ReaderBuilder::new().has_headers(false).from_path(path) {
            Ok(rdr) => rdr,
            Err(e) => {
                return Err(format!("Error opening {}: {}", path.display(), e));
            }
        };
        let t_load = Instant::now();
        info!("Loading [{}]", path.display());
        let systems: Vec<TreeNode> = reader
            .deserialize::<System>()
            .map(|res| {
                let sys = res.map_err(|e| format!("{}", e))?;
                // A system is a valid stop if it has a jump boost (mult > 1)
                // or a scoopable star.
                if (sys.mult > 1.0f32) || (sys.has_scoopable) {
                    scoopable.insert(sys.id);
                }
                Ok(sys.to_node())
            })
            .collect::<Result<Vec<TreeNode>, String>>()?;
        info!(
            "{} Systems loaded in {}",
            systems.len(),
            format_duration(t_load.elapsed())
        );
        let t_load = Instant::now();
        self.tree = Arc::new(LargeNodeRTree::bulk_load_with_params(systems));
        info!("R*-Tree built in {}", format_duration(t_load.elapsed()));
        self.path = PathBuf::from(path);
        // Cache failures are non-fatal: lookups by id just become unavailable.
        self.cache = LineCache::new(path)
            .map_err(|e| error!("Error creating cache: {}", e))
            .ok();
        self.scoopable = scoopable;
        Ok(())
    }
|
|
|
|
|
2022-02-23 21:45:59 +00:00
|
|
|
    /// Looks up a system by id through the line cache.
    ///
    /// # Panics
    /// Panics when no cache is loaded (`self.cache` is `None`) or the cache
    /// mutex is poisoned.
    pub fn get(&self, id: u32) -> Result<Option<System>, String> {
        let mut cache = self.cache.as_ref().unwrap().lock().unwrap();
        cache.get(id)
    }
|
|
|
|
|
|
|
|
pub fn get_tree(&self) -> &LargeNodeRTree<TreeNode> {
|
|
|
|
return &*self.tree;
|
2020-06-16 13:38:31 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
    /// (Re)starts the neighbour-expansion pool with `num` worker threads,
    /// shutting down any previous workers.
    fn start_workers(&mut self, num: usize) -> Result<(), String> {
        self.workers.resize(self.tree.clone(), num)
    }
|
|
|
|
|
2022-02-23 21:45:59 +00:00
|
|
|
    /// Restores a router from a precomputed route-tree dump produced earlier.
    ///
    /// Returns `(systems_csv_path, jump_range, router)`. The restored router
    /// carries only the parent map and line cache — the spatial index is left
    /// empty and must be rebuilt via `load` if needed.
    ///
    /// # Errors
    /// Fails when the dump cannot be opened/deserialized or when the hash of
    /// the referenced CSV no longer matches the one recorded in the dump.
    #[cfg_attr(feature = "profiling", tracing::instrument)]
    pub fn from_file(filename: &Path) -> Result<(PathBuf, f32, Self), String> {
        let mut reader = BufReader::new(match File::open(&filename) {
            Ok(fh) => fh,
            Err(e) => return Err(format!("Error opening file {}: {}", filename.display(), e)),
        });
        info!("Loading {}", filename.display());
        // Dump layout: (primary_only, range, csv_hash, csv_path, parent_map).
        let (primary, range, file_hash, path, route_tree): (
            bool,
            f32,
            Vec<u8>,
            PathBuf,
            FxHashMap<u32, u32>,
        ) = match bincode::deserialize_from(&mut reader) {
            Ok(res) => res,
            Err(e) => return Err(format!("Error loading file {}: {}", filename.display(), e)),
        };
        if hash_file(&path) != file_hash {
            return Err("File hash mismatch!".to_string());
        }
        let cache =
            Some(LineCache::new(&path).map_err(|e| format!("Error creating cache: {}", e))?);
        Ok((
            path.clone(),
            range,
            Self {
                tree: Arc::new(RTree::default()),
                scoopable: FxHashSet::default(),
                route_tree: Some(route_tree),
                cache,
                path,
                callback: None,
                primary_only: primary,
                workers: WorkerSet::Empty,
                filter: None,
                weight: None,
            },
        ))
    }
|
|
|
|
|
2022-02-23 21:45:59 +00:00
|
|
|
    /// Returns the loaded system nearest to `center`, resolved to a full
    /// `System` record; `None` when the tree is empty.
    ///
    /// NOTE(review): `TreeNode::get(self)` is defined elsewhere; the trailing
    /// `unwrap` presumably panics on a cache lookup error — confirm.
    pub fn closest(&self, center: &[f32; 3]) -> Option<System> {
        self.tree.nearest_neighbor(center)?.get(self).unwrap()
    }
|
|
|
|
|
2020-02-05 23:24:24 +00:00
|
|
|
    /// All nodes within `radius` of `center` (rstar takes a squared radius).
    fn points_in_sphere(&self, center: &[f32; 3], radius: f32) -> impl Iterator<Item = &TreeNode> {
        self.tree.locate_within_distance(*center, radius * radius)
    }
|
|
|
|
|
2022-02-23 21:45:59 +00:00
|
|
|
    /// Nodes within plain jump `range` of `node` (no boost multipliers).
    pub fn neighbours(&self, node: &TreeNode, range: f32) -> impl Iterator<Item = &TreeNode> {
        self.points_in_sphere(&node.pos, range)
    }
|
|
|
|
|
|
|
|
fn neighbours_r(&self, node: &TreeNode, range: f32) -> impl Iterator<Item = &TreeNode> {
|
2020-06-16 13:38:31 +00:00
|
|
|
let pos = node.pos;
|
2022-02-23 21:45:59 +00:00
|
|
|
let r2 = range * range;
|
2020-03-28 13:53:52 +00:00
|
|
|
self.points_in_sphere(&node.pos, range * 4.0)
|
|
|
|
.filter(move |s| {
|
2022-02-23 21:45:59 +00:00
|
|
|
return s.dist2(&pos) < (r2 * s.get_mult() * s.get_mult());
|
2020-03-28 13:53:52 +00:00
|
|
|
})
|
2020-02-05 23:24:24 +00:00
|
|
|
}
|
|
|
|
|
2022-02-23 21:45:59 +00:00
|
|
|
fn line_dist(&self, node: &TreeNode, start: &TreeNode, end: &TreeNode) -> f32 {
|
|
|
|
let c = dist(&start.pos, &end.pos);
|
|
|
|
let a = dist(&node.pos, &end.pos);
|
|
|
|
let b = dist(&start.pos, &node.pos);
|
|
|
|
return ((a + b + c) * (-a + b + c) * (a - b + c) * (a + b - c)).sqrt() / (c * 2.0);
|
2020-02-05 23:24:24 +00:00
|
|
|
}
|
|
|
|
|
2022-02-23 21:45:59 +00:00
|
|
|
fn valid(&self, id: u32) -> bool {
|
|
|
|
if self.scoopable.is_empty() {
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
return self.scoopable.contains(&id);
|
2020-06-16 13:38:31 +00:00
|
|
|
}
|
|
|
|
|
2022-06-14 21:00:50 +00:00
|
|
|
pub fn bfs_loop_test(&self, range: f32, source: &TreeNode, goal: &TreeNode, n: usize) -> (bool, usize, usize) {
|
|
|
|
// info!("Starting thread pool");
|
|
|
|
// ThreadPoolBuilder::new()
|
|
|
|
// .num_threads(8)
|
|
|
|
// .build_global()
|
|
|
|
// .unwrap();
|
|
|
|
let t_start = Instant::now();
|
|
|
|
let route_dist = dist(&source.pos, &goal.pos);
|
|
|
|
let seen: Arc<DashMap<u32, u32>> = Arc::new(DashMap::new());
|
|
|
|
let mut depth = 0;
|
|
|
|
let mut queue = vec![*source];
|
|
|
|
let mut queue_next = vec![];
|
|
|
|
let tree = self.tree.clone();
|
|
|
|
let r2 = range * range;
|
|
|
|
let mut found = false;
|
|
|
|
while !queue.is_empty() {
|
|
|
|
depth += 1;
|
|
|
|
let seen = seen.clone();
|
|
|
|
queue_next.extend(queue.drain(..).flat_map(|sys| {
|
|
|
|
let seen = seen.clone();
|
|
|
|
tree.locate_within_distance(sys.pos, r2)
|
|
|
|
.filter_map(move |nb| seen.insert(nb.id, sys.id).is_none().then_some(*nb))
|
|
|
|
}));
|
|
|
|
if seen.contains_key(&goal.id) {
|
|
|
|
found = true;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
std::mem::swap(&mut queue_next, &mut queue);
|
|
|
|
if n != 0 {
|
|
|
|
queue.sort_by_cached_key(|v| F32(heuristic(range, v, goal)));
|
|
|
|
queue.truncate(n);
|
|
|
|
}
|
|
|
|
// info!("[{}|{}] {}", goal.id, depth, queue.len());
|
|
|
|
}
|
|
|
|
let seen = Arc::try_unwrap(seen)
|
|
|
|
.unwrap()
|
|
|
|
.into_iter()
|
|
|
|
.collect::<FxHashMap<u32, u32>>();
|
|
|
|
info!(
|
|
|
|
"[{}|{}->{} ({:.02} Ly)|{}] Depth: {} Seen: {} ({:.02}%) Took: {}",
|
|
|
|
n,
|
|
|
|
source.id,
|
|
|
|
goal.id,
|
|
|
|
route_dist,
|
|
|
|
found,
|
|
|
|
depth,
|
|
|
|
seen.len(),
|
|
|
|
((seen.len() as f64) / (tree.size() as f64)) * 100.0,
|
|
|
|
humantime::format_duration(t_start.elapsed())
|
|
|
|
);
|
|
|
|
return (found, depth, seen.len());
|
|
|
|
let path=self.reconstruct(goal.id, &seen);
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Walks the predecessor `map` backwards starting at `goal_id`, resolving
/// each predecessor ID to its `System`, and returns the chain in forward
/// (source-to-goal) order. Stops when an ID has no predecessor entry.
fn reconstruct(&self, goal_id: u32, map: &FxHashMap<u32, u32>) -> Result<Vec<System>, String> {
    let mut chain = Vec::new();
    let mut cursor = goal_id;
    loop {
        let Some(&pred) = map.get(&cursor) else {
            break;
        };
        let sys = self
            .get(pred)?
            .ok_or(format!("System ID {} not found", pred))?;
        chain.push(sys);
        cursor = pred;
    }
    chain.reverse();
    Ok(chain)
}
|
|
|
|
|
2020-06-16 13:38:31 +00:00
|
|
|
fn best_multiroute(
|
2020-03-28 13:53:52 +00:00
|
|
|
&mut self,
|
2020-02-05 23:24:24 +00:00
|
|
|
waypoints: &[System],
|
2020-03-28 13:53:52 +00:00
|
|
|
range: f32,
|
2020-02-05 23:24:24 +00:00
|
|
|
keep: (bool, bool),
|
2022-02-23 21:45:59 +00:00
|
|
|
mode: ModeConfig,
|
|
|
|
_max_dist: f32,
|
2020-03-28 13:53:52 +00:00
|
|
|
num_workers: usize,
|
2020-02-05 23:24:24 +00:00
|
|
|
) -> Result<Vec<System>, String> {
|
|
|
|
let mut best_score: f32 = std::f32::MAX;
|
|
|
|
let mut waypoints = waypoints.to_owned();
|
|
|
|
let mut best_permutation_waypoints = waypoints.to_owned();
|
|
|
|
let first = waypoints.first().cloned();
|
|
|
|
let last = waypoints.last().cloned();
|
2022-02-23 21:45:59 +00:00
|
|
|
info!("Finding best permutation of hops...");
|
2020-02-05 23:24:24 +00:00
|
|
|
while waypoints.prev_permutation() {}
|
|
|
|
loop {
|
|
|
|
let c_first = waypoints.first().cloned();
|
|
|
|
let c_last = waypoints.last().cloned();
|
|
|
|
let valid = (keep.0 && (c_first == first)) && (keep.1 && (c_last == last));
|
|
|
|
if valid {
|
|
|
|
let mut total_d = 0.0;
|
|
|
|
for pair in waypoints.windows(2) {
|
|
|
|
match pair {
|
|
|
|
[src, dst] => {
|
|
|
|
total_d += src.distp2(dst);
|
|
|
|
}
|
|
|
|
_ => return Err("Invalid routing parameters!".to_string()),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if total_d < best_score {
|
|
|
|
best_score = total_d;
|
|
|
|
best_permutation_waypoints = waypoints.to_owned();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if !waypoints.next_permutation() {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
2022-02-23 21:45:59 +00:00
|
|
|
info!("Best permutation: {:?}", best_permutation_waypoints);
|
|
|
|
self.multiroute(&best_permutation_waypoints, range, mode, num_workers)
|
2020-02-05 23:24:24 +00:00
|
|
|
}
|
|
|
|
|
2020-06-16 13:38:31 +00:00
|
|
|
/// Plots a route visiting `waypoints` in order, concatenating the per-leg
/// routes produced by the algorithm selected in `mode`.
///
/// When `num_workers != 0`, worker threads are started up front and the
/// worker set is drained/closed before returning. For every consecutive
/// waypoint pair the chosen algorithm is run; the first leg is appended
/// whole, later legs skip their first system to avoid duplicating the
/// shared waypoint.
///
/// # Errors
/// Fails if no systems are loaded, if worker startup fails, if the chosen
/// routing algorithm fails, or if worker shutdown fails.
fn multiroute(
    &mut self,
    waypoints: &[System],
    range: f32,
    mode: ModeConfig,
    num_workers: usize,
) -> Result<Vec<System>, String> {
    if self.tree.size() == 0 {
        return Err("No Systems loaded, pleased load some with the 'load' method!".to_string());
    }
    if num_workers != 0 {
        self.start_workers(num_workers)?;
    }
    let mut route = vec![];
    // Route each consecutive waypoint pair as one leg.
    for pair in waypoints.windows(2) {
        match pair {
            [src, dst] => {
                let d_total = dist(&src.pos, &dst.pos);
                info!("Plotting route from [{}] to [{}]...", src.name, dst.name);
                info!(
                    "Jump Range: {} Ly, Distance: {} Ly, Estimated Jumps: {}",
                    range,
                    d_total,
                    d_total / range
                );
                info!("Mode: {}", mode);
                // NOTE(review): max_dist is always NaN here — presumably a
                // placeholder until distance-limited search is implemented.
                let max_dist = std::f32::NAN;
                // Dispatch to the routing algorithm selected by `mode`.
                let block = match &mode {
                    ModeConfig::AStar {
                        greedyness,
                        beam_width,
                        use_distance,
                    } => {
                        if *use_distance {
                            // Distance-based A* is not implemented yet.
                            todo!();
                        }
                        self.route_astar(src, dst, *greedyness, beam_width, range, max_dist)
                    }
                    ModeConfig::DepthFirst => self.route_dfs(src, dst, range),
                    ModeConfig::BreadthFirst { beam_width } => {
                        self.route_bfs(src, dst, range, beam_width, max_dist)
                    }
                    ModeConfig::IncrementalBroadening => {
                        self.route_incremental_broadening(range)
                    }
                    ModeConfig::Dijkstra => self.route_dijkstra(range),
                    ModeConfig::Ship { ship, mode } => self.route_ship(src, dst, ship, mode),
                    ModeConfig::BiDir => self.route_bidir(src, dst, range),
                    ModeConfig::Greedy => self.route_greedy(src, dst, range),
                }?;
                if route.is_empty() {
                    // First leg: take it whole.
                    for sys in block.iter() {
                        route.push(sys.clone());
                    }
                } else {
                    // Later legs: skip the first system — it is the last
                    // system of the previous leg.
                    for sys in block.iter().skip(1) {
                        route.push(sys.clone());
                    }
                }
            }
            _ => {
                return Err("Invalid routing parameters!".to_owned());
            }
        }
    }
    // Swap out and close the worker set so threads shut down cleanly.
    let mut workers = WorkerSet::Empty;
    std::mem::swap(&mut self.workers, &mut workers);
    workers.close()?;
    Ok(route)
}
|
|
|
|
|
2020-06-16 13:38:31 +00:00
|
|
|
fn route_astar(
|
2022-02-23 21:45:59 +00:00
|
|
|
&mut self,
|
2020-02-05 23:24:24 +00:00
|
|
|
src: &System,
|
|
|
|
dst: &System,
|
|
|
|
factor: f32,
|
2022-02-23 21:45:59 +00:00
|
|
|
beam_width: &BeamWidth,
|
2020-03-28 13:53:52 +00:00
|
|
|
range: f32,
|
2022-02-23 21:45:59 +00:00
|
|
|
max_dist: f32,
|
2020-02-05 23:24:24 +00:00
|
|
|
) -> Result<Vec<System>, String> {
|
2022-02-23 21:45:59 +00:00
|
|
|
let mut log_file = BufWriter::new(File::create("route_log.txt").unwrap());
|
2020-02-05 23:24:24 +00:00
|
|
|
if factor == 0.0 {
|
2022-02-23 21:45:59 +00:00
|
|
|
return self.route_bfs(src, dst, range, beam_width, max_dist);
|
2020-03-28 13:53:52 +00:00
|
|
|
}
|
2022-02-23 21:45:59 +00:00
|
|
|
if (1.0 - factor).abs() < 1e-3 {
|
|
|
|
if beam_width.is_set() {
|
|
|
|
warn!("Usign greedy algorithm, ignorimg beam width!")
|
2020-06-16 13:38:31 +00:00
|
|
|
}
|
|
|
|
return self.route_greedy(src, dst, range);
|
2020-02-05 23:24:24 +00:00
|
|
|
}
|
2022-02-23 21:45:59 +00:00
|
|
|
let mut factor = factor;
|
|
|
|
if !(0.0..=1.0).contains(&factor) {
|
|
|
|
let new_factor = factor.min(1.0).max(0.0);
|
|
|
|
warn!(
|
|
|
|
"Greedyness of {} is out of range 0.0-1.0, clamping to {}",
|
|
|
|
factor, new_factor
|
|
|
|
);
|
|
|
|
factor = new_factor;
|
|
|
|
}
|
|
|
|
let src_name = src.name.clone();
|
|
|
|
let dst_name = dst.name.clone();
|
2020-02-05 23:24:24 +00:00
|
|
|
let start_sys = src;
|
|
|
|
let goal_sys = dst;
|
2022-02-23 21:45:59 +00:00
|
|
|
let goal_node = goal_sys.to_node();
|
2020-02-05 23:24:24 +00:00
|
|
|
let d_total = dist(&start_sys.pos, &goal_sys.pos);
|
|
|
|
let mut d_rem = d_total;
|
|
|
|
|
|
|
|
let mut state = SearchState {
|
|
|
|
mode: "A-Star".into(),
|
|
|
|
depth: 0,
|
|
|
|
queue_size: 0,
|
|
|
|
d_rem: d_total,
|
|
|
|
d_total,
|
|
|
|
prc_done: 0.0,
|
|
|
|
n_seen: 0,
|
|
|
|
prc_seen: 0.0,
|
|
|
|
from: src_name.clone(),
|
|
|
|
to: dst_name.clone(),
|
2022-02-23 21:45:59 +00:00
|
|
|
system: start_sys.name.clone(),
|
2020-02-05 23:24:24 +00:00
|
|
|
};
|
|
|
|
let total = self.tree.size() as f32;
|
2022-02-23 21:45:59 +00:00
|
|
|
// let mut seen_v = vec![0u64;(self.tree.size()>>8)+1];
|
2020-02-05 23:24:24 +00:00
|
|
|
let mut t_last = Instant::now();
|
2022-02-23 21:45:59 +00:00
|
|
|
let mut prev = FxHashMap::default();
|
|
|
|
let mut seen: FxHashMap<u32, f32> = FxHashMap::default();
|
2020-02-05 23:24:24 +00:00
|
|
|
let mut found = false;
|
2022-02-23 21:45:59 +00:00
|
|
|
let mut queue: MinFHeap<(usize, TreeNode)> = MinFHeap::new();
|
|
|
|
let h = distm(&start_sys.pos, &goal_sys.pos) / range;
|
|
|
|
queue.push(
|
|
|
|
h,
|
|
|
|
(
|
|
|
|
0, // depth
|
|
|
|
start_sys.to_node(),
|
|
|
|
),
|
|
|
|
);
|
2020-06-16 13:38:31 +00:00
|
|
|
seen.insert(start_sys.id, 0.0);
|
2020-02-05 23:24:24 +00:00
|
|
|
while !found {
|
2022-02-23 21:45:59 +00:00
|
|
|
while let Some((_, (depth, node))) = queue.pop() {
|
|
|
|
writeln!(log_file, "{},{}", node.id, depth).unwrap();
|
2020-02-05 23:24:24 +00:00
|
|
|
if t_last.elapsed().as_millis() > STATUS_INVERVAL {
|
|
|
|
let sys = node
|
2022-02-23 21:45:59 +00:00
|
|
|
.get(self)?
|
2020-06-16 13:38:31 +00:00
|
|
|
.unwrap_or_else(|| panic!("System-ID {} not found!", node.id));
|
2020-02-05 23:24:24 +00:00
|
|
|
t_last = Instant::now();
|
|
|
|
state.depth = depth;
|
|
|
|
state.queue_size = queue.len();
|
|
|
|
state.prc_done = ((d_total - d_rem) * 100f32) / d_total;
|
|
|
|
state.d_rem = d_rem;
|
|
|
|
state.n_seen = seen.len();
|
|
|
|
state.prc_seen = ((seen.len() * 100) as f32) / total;
|
2022-02-23 21:45:59 +00:00
|
|
|
state.system = sys.name.clone();
|
|
|
|
if let Some(cb) = &self.callback {
|
|
|
|
match cb(&state) {
|
|
|
|
Ok(_) => (),
|
|
|
|
Err(e) => {
|
|
|
|
return Err(format!("{:?}", e));
|
|
|
|
}
|
|
|
|
};
|
|
|
|
}
|
2020-02-05 23:24:24 +00:00
|
|
|
}
|
|
|
|
if node.id == goal_sys.id {
|
|
|
|
queue.clear();
|
|
|
|
found = true;
|
|
|
|
break;
|
|
|
|
}
|
2020-06-16 13:38:31 +00:00
|
|
|
|
|
|
|
let new_nodes: Vec<_> = self
|
2022-02-23 21:45:59 +00:00
|
|
|
.neighbours(&node, node.get_mult() * range)
|
2020-06-16 13:38:31 +00:00
|
|
|
.filter(|nb| (self.valid(nb.id) || (nb.id == goal_sys.id)))
|
|
|
|
.filter(|nb| !seen.contains_key(&nb.id))
|
|
|
|
.map(|nb| {
|
|
|
|
prev.insert(nb.id, node);
|
|
|
|
let d_g = nb.distp(goal_sys);
|
|
|
|
if d_g < d_rem {
|
|
|
|
d_rem = d_g;
|
|
|
|
}
|
2022-02-23 21:45:59 +00:00
|
|
|
let h = heuristic(range, nb, &goal_node) * factor;
|
|
|
|
let f = (depth as f32) * (1.0 - factor);
|
|
|
|
(h + f, (depth + 1, *nb))
|
2020-06-16 13:38:31 +00:00
|
|
|
})
|
|
|
|
.collect();
|
2022-02-23 21:45:59 +00:00
|
|
|
for (w, node) in new_nodes {
|
|
|
|
seen.insert(node.1.id, 0.0);
|
|
|
|
queue.push(w, node);
|
2020-06-16 13:38:31 +00:00
|
|
|
}
|
2020-02-05 23:24:24 +00:00
|
|
|
}
|
|
|
|
if queue.is_empty() {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if !found {
|
|
|
|
return Err(format!("No route from {} to {} found!", src_name, dst_name));
|
|
|
|
}
|
|
|
|
let mut v: Vec<System> = Vec::new();
|
|
|
|
let mut curr_sys = goal_sys.clone();
|
|
|
|
loop {
|
|
|
|
v.push(curr_sys.clone());
|
|
|
|
match prev.get(&curr_sys.id) {
|
2022-02-23 21:45:59 +00:00
|
|
|
Some(sys) => curr_sys = sys.get(self)?.unwrap(),
|
2020-02-05 23:24:24 +00:00
|
|
|
None => {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
v.reverse();
|
|
|
|
Ok(v)
|
|
|
|
}
|
|
|
|
|
2020-06-16 13:38:31 +00:00
|
|
|
/// Greedy best-first search from `src` to `dst` at jump range `range`.
///
/// Always expands the node with the smallest heuristic value first and
/// never revisits a system: fast, but the route is not guaranteed to be
/// jump-optimal. Progress is reported through `self.callback` (if set) at
/// most every `STATUS_INVERVAL` ms, and every expanded node is appended to
/// `route_log.txt` as `id,depth`.
///
/// # Errors
/// Fails if the progress callback returns an error or no route exists.
///
/// # Panics
/// Panics if `route_log.txt` cannot be created, or if a node ID cannot be
/// resolved to a system.
fn route_greedy(&self, src: &System, dst: &System, range: f32) -> Result<Vec<System>, String> {
    // Debug instrumentation: truncates/creates route_log.txt in the CWD.
    let mut log_file = BufWriter::new(File::create("route_log.txt").unwrap());
    let src_name = src.name.clone();
    let dst_name = dst.name.clone();
    let start_sys = src;
    let goal_sys = dst;
    let start_node = src.to_node();
    let goal_node = dst.to_node();
    let d_total = dist(&start_sys.pos, &goal_sys.pos);
    // Closest approach to the goal so far (progress reporting only).
    let mut d_rem = d_total;
    // Snapshot handed to the user callback.
    let mut state = SearchState {
        mode: "Greedy".into(),
        depth: 0,
        queue_size: 0,
        d_rem: d_total,
        d_total,
        prc_done: 0.0,
        n_seen: 0,
        prc_seen: 0.0,
        from: src_name.clone(),
        to: dst_name.clone(),
        system: start_sys.name.clone(),
    };
    let total = self.tree.size() as f32;
    let mut t_last = Instant::now();
    // Predecessor map (child ID -> parent node) for path reconstruction.
    let mut prev = FxHashMap::default();
    // Visited set; the f32 value is unused.
    let mut seen: FxHashMap<u32, f32> = FxHashMap::default();
    let mut found = false;
    // Min-heap keyed purely by heuristic; payload is (depth, node).
    let mut queue: MinFHeap<(usize, TreeNode)> = MinFHeap::new();
    queue.push(
        heuristic(range, &start_node, &goal_node),
        (0, start_sys.to_node()),
    );
    seen.insert(start_sys.id, 0.0);
    while !found {
        while let Some((_, (depth, node))) = queue.pop() {
            writeln!(log_file, "{},{}", node.id, depth).unwrap();
            // Throttled progress reporting.
            if t_last.elapsed().as_millis() > STATUS_INVERVAL {
                let sys = node
                    .get(self)?
                    .unwrap_or_else(|| panic!("System-ID {} does not exist!", &node.id));
                t_last = Instant::now();
                state.depth = depth;
                state.queue_size = queue.len();
                state.prc_done = ((d_total - d_rem) * 100f32) / d_total;
                state.d_rem = d_rem;
                state.n_seen = seen.len();
                state.prc_seen = ((seen.len() * 100) as f32) / total;
                state.system = sys.name.clone();
                if let Some(cb) = &self.callback {
                    match cb(&state) {
                        Ok(_) => (),
                        Err(e) => {
                            // A failing callback aborts the search.
                            return Err(format!("{:?}", e));
                        }
                    };
                }
            }
            if node.id == goal_sys.id {
                queue.clear();
                found = true;
                break;
            }
            // Expand all unseen, valid neighbours within jump range.
            let new_nodes: Vec<_> = self
                .neighbours(&node, node.get_mult() * range)
                .filter(|nb| (self.valid(nb.id) || (nb.id == goal_sys.id)))
                .filter(|nb| !seen.contains_key(&nb.id))
                .map(|nb| {
                    prev.insert(nb.id, node);
                    let d_g = nb.distp(goal_sys);
                    if d_g < d_rem {
                        d_rem = d_g;
                    }
                    *nb
                })
                .collect();
            for node in new_nodes {
                seen.insert(node.id, 0.0);
                let h = heuristic(range, &node, &goal_node);
                queue.push(h, (depth + 1, node));
            }
        }
        if queue.is_empty() {
            break;
        }
    }
    if !found {
        return Err(format!("No route from {} to {} found!", src_name, dst_name));
    }
    // Walk the predecessor chain back from the goal, then reverse.
    let mut v: Vec<System> = Vec::new();
    let mut curr_sys = goal_sys.clone();
    loop {
        v.push(curr_sys.clone());
        match prev.get(&curr_sys.id) {
            Some(sys) => curr_sys = sys.get(self)?.unwrap(),
            None => {
                break;
            }
        }
    }
    v.reverse();
    Ok(v)
}
|
|
|
|
|
2022-02-23 21:45:59 +00:00
|
|
|
/// Precomputes a shortest-path tree (SPT) rooted at `root` over the jump
/// graph at range `range`, skipping systems flagged in `skiplist`.
///
/// First pass: feeds all non-skipped systems through neighbour-lookup
/// worker threads to warm up a `to_visit` set (its result is only printed).
/// Second pass: a BFS from `root` records each node's predecessor in a
/// dense `prev` vector (`u32::MAX` = unreached, self-ID = root).
///
/// Returns a `PrecompTree::Full` when the skiplist is empty; the partial
/// variant is not implemented yet (`todo!`).
///
/// NOTE(review): `_parent_tree` is currently unused — it is presumably
/// meant for the partial-tree case (see the commented-out return below).
fn precomp_spt(
    &mut self,
    root: TreeNode,
    range: f32,
    skiplist: &[bool],
    _parent_tree: &Option<String>,
) -> PrecompTree {
    let t_start = Instant::now();
    let skiplist_size = skiplist.iter().filter(|&&v| v).count();
    let mut skipped: usize = 0;
    let mut edges = 0usize;
    let mut n: usize = 0;
    // Dense predecessor map; u32::MAX marks "not reached".
    let mut prev = vec![std::u32::MAX; self.tree.size()];
    let root_id = root.id;
    {
        let mut to_visit = FxHashSet::default();
        let mut visited = FxHashSet::default();
        // Spawn neighbour-lookup workers; feed them in 100k-ID chunks.
        let (tx, rx, mut handles) = self.neighbor_workers(8, range);
        let chunks = skiplist.iter().enumerate().map(|(id, v)| (id as u32, v));
        let chunks = chunks
            .inspect(|&(id, _)| {
                visited.insert(id as u32);
                if id % 100_000 == 0 {
                    println!("ID: {}", id);
                }
            })
            .filter(|(_, v)| !*v)
            .map(|(id, _)| id);
        for chunk in &chunks.chunks(100_000) {
            let chunk = chunk.collect_vec();
            let res = self
                .get_systems_by_ids(&chunk)
                .unwrap()
                .values()
                .map(|sys| sys.to_node())
                .collect_vec();
            tx.send(res).unwrap();
            // Opportunistically drain any finished results while feeding.
            for res in rx.try_iter() {
                for (_node, nbs) in res {
                    nbs.iter().for_each(|nb| {
                        to_visit.insert(nb.id);
                    });
                }
            }
        }
        // Closing the sender lets the workers finish; drain the remainder.
        drop(tx);
        while let Ok(res) = rx.recv() {
            for (_node, nbs) in res {
                nbs.iter().for_each(|nb| {
                    to_visit.insert(nb.id);
                });
            }
        }
        drop(rx);
        for handle in handles.drain(..) {
            handle.join().unwrap();
        }
        println!("new queue: {} Nodes", to_visit.len());
    }
    // BFS from the root, recording predecessors.
    let mut q = VecDeque::new();
    prev[root.id as usize] = root.id;
    q.push_back(root);
    while let Some(node) = q.pop_front() {
        if n % 100_000 == 0 {
            // Periodic progress report.
            let p_prc: f64 = ((n as f64) / (self.tree.size() as f64)) * 100.0;
            let s_prc: f64 = ((skipped as f64) / (skiplist.len() as f64)) * 100.0;
            println!(
                "[{}] {}/{} ({:.2} %) | Q: {} | S: {}/{} ({:.2} %) ",
                root_id,
                n,
                self.tree.size(),
                p_prc,
                q.len(),
                skipped,
                skiplist_size,
                s_prc
            );
        }
        let mut nbs = self
            .neighbours(&node, node.get_mult() * range)
            .collect::<Vec<_>>();
        // Expand neighbours farthest-reaching first (negated key => descending).
        nbs.sort_by_key(|n| F32(-(dist(&root.pos, &n.pos) + (n.get_mult() * range))));
        for nb in nbs {
            if skiplist[nb.id as usize] {
                skipped += 1;
                continue;
            }
            edges += 1;
            // First visit wins: record predecessor and enqueue.
            if prev[nb.id as usize] == std::u32::MAX {
                prev[nb.id as usize] = node.id;
                q.push_back(*nb);
            }
        }
        n += 1;
    }
    println!(
        "{} | Nodes: {}/{}, Edges: {}",
        format_duration(t_start.elapsed()),
        prev.iter().filter(|&&v| v != std::u32::MAX).count(),
        self.tree.size(),
        edges
    );
    if skiplist_size == 0 {
        return PrecompTree::Full {
            id: root_id,
            map: prev,
        };
    } else {
        // Sparse map of the reached, non-root entries for a partial tree.
        let mut spt_map = FxHashMap::default();
        for (n, &v) in prev.iter().enumerate() {
            if v == std::u32::MAX || n == (v as usize) {
                continue;
            }
            spt_map.insert(n as u32, v);
        }
        // Partial trees are not implemented yet.
        todo!();
        // return PrecompTree::Partial {
        //     parent: parent_tree.clone().unwrap(),
        //     remove: FxHashSet::default(),
        //     links: spt_map,
        // };
    }
}
|
|
|
|
|
|
|
|
pub fn compute_best_diff(&self, paths: &[&str]) -> Result<(u32, u32, Vec<(u32, u32)>), String> {
|
|
|
|
// let inverse_spt = FxHashMap<u32, FxHashSet<u32>>
|
|
|
|
let mut trees = Vec::new();
|
|
|
|
for &path in paths {
|
|
|
|
let reader = BufReader::new(File::open(&path).unwrap());
|
|
|
|
let spt: PrecompTree = bincode::deserialize_from(reader).unwrap();
|
|
|
|
let spt = match spt {
|
|
|
|
PrecompTree::Full { id, map } => (id, map),
|
|
|
|
PrecompTree::Partial { .. } => return Err("Need full tree!".to_owned()),
|
|
|
|
};
|
|
|
|
trees.push(spt);
|
|
|
|
}
|
|
|
|
let mut best = (std::usize::MAX, (0, 0, vec![]));
|
|
|
|
for (i1, (id_1, t1)) in trees.iter().enumerate() {
|
|
|
|
for (_i2, (id_2, t2)) in trees.iter().enumerate().skip(i1 + 1) {
|
|
|
|
if t1.len() != t2.len() {
|
|
|
|
println!("Length missmatch between {} and {}", id_1, id_2);
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
let diff: Vec<(u32, u32)> = t1
|
|
|
|
.iter()
|
|
|
|
.zip(t2)
|
|
|
|
.enumerate()
|
|
|
|
.filter(|(_, (a, b))| a != b)
|
|
|
|
.map(|(i, (_, b))| (i as u32, *b))
|
|
|
|
.collect();
|
|
|
|
if diff.len() < best.0 {
|
|
|
|
best = (diff.len(), (*id_1, *id_2, diff));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return Ok(best.1);
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Experimental precomputation driver: builds (or loads a cached) full SPT
/// rooted near the origin, inverts it, finds the largest subtree hanging
/// off a direct child of the root, and precomputes a second SPT rooted at
/// that child with the subtree skipped.
///
/// NOTE(review): paths are hard-coded to the `O:\` drive and results are
/// written via `bincode` — this looks like scratch/experiment code, not a
/// production entry point.
///
/// # Panics
/// Panics on any file I/O or (de)serialization failure, and if the cached
/// tree turns out to be `PrecompTree::Partial`.
pub fn precomp_bfs(&mut self, range: f32) -> Result<(), String> {
    // add subtree nodes to visited set
    // add all neighbors of visited nodes which are not in visited set to search queue
    // =================================
    // let tree_diff =
    //     self.compute_best_diff(&[r#"O:\spt_740186_48.dat"#, r#"O:\spt_32861765_48.dat"#])?;
    // let out_path = format!(r#"O:\spt_diff_{}_{}.dat"#, tree_diff.0, tree_diff.1);
    // let buf_writer = BufWriter::new(File::create(&out_path).unwrap());
    // bincode::serialize_into(buf_writer, &tree_diff).unwrap();
    // println!("Overlap: {:.2} % ", ol * 100.0);
    let mut skiplist_vec = vec![false; self.tree.size()];
    // Root the SPT at the system closest to the galactic origin.
    let root = *self.tree.locate_at_point(&[0.0, 0.0, 0.0]).unwrap();
    let out_path = format!(r#"O:\spt_{}_{}.dat"#, root.id, range);
    // Compute and cache the root SPT only if not already on disk.
    if !Path::new(&out_path).exists() {
        let spt = self.precomp_spt(root, range, &skiplist_vec, &None);
        let buf_writer = BufWriter::new(File::create(&out_path).unwrap());
        bincode::serialize_into(buf_writer, &spt).unwrap();
    };
    let reader = BufReader::new(File::open(&out_path).unwrap());
    let spt: PrecompTree = bincode::deserialize_from(reader).unwrap();
    let spt = match spt {
        PrecompTree::Full { map, .. } => map,
        PrecompTree::Partial { .. } => panic!("Need full root tree!"),
    };
    let t_start = Instant::now();
    // Invert the SPT: parent ID -> set of child IDs.
    let mut inverse_spt: FxHashMap<u32, FxHashSet<u32>> = FxHashMap::default();
    for (n, &v) in spt.iter().enumerate() {
        // Skip unreached entries and the self-referencing root.
        if v == std::u32::MAX || n == (v as usize) {
            continue;
        }
        inverse_spt.entry(v).or_default().insert(n as u32);
    }
    let tree_deg =
        inverse_spt.values().map(|v| v.len() as f64).sum::<f64>() / (inverse_spt.len() as f64);
    println!("Tree inversion took: {:?}", t_start.elapsed());
    println!("Mean degree of SPT: {}", tree_deg);
    let t_start = Instant::now();
    // Find the direct root child with the largest hanging subtree.
    let mut best = (FxHashSet::default(), std::u32::MAX);
    for vert in inverse_spt.get(&root.id).iter().flat_map(|l| l.iter()) {
        // compute all paths that terminate at vert using our reversed tree
        let mut skiplist = FxHashSet::default();
        let mut q = VecDeque::new();
        q.push_back(*vert);
        while let Some(next) = q.pop_front() {
            skiplist.insert(next);
            if let Some(next_nbs) = inverse_spt.get(&next) {
                q.extend(next_nbs.iter().filter(|v| !skiplist.contains(v)).sorted());
            }
        }
        if skiplist.len() > best.0.len() {
            best = (skiplist, *vert);
        }
    }
    println!(
        "Largest subtree: [{}] {} ({:.2} %)",
        best.1,
        best.0.len(),
        ((best.0.len() as f64) / (self.tree.size() as f64)) * 100.0
    );
    // Skip the whole subtree when computing the secondary SPT.
    for id in best.0 {
        skiplist_vec[id as usize] = true;
    }
    let node = self.get(best.1).unwrap().unwrap().to_node();
    println!(
        "Found optimal next node in {}: {:?}",
        format_duration(t_start.elapsed()),
        node
    );
    let sub_spt = self.precomp_spt(node, range, &skiplist_vec, &Some(out_path));
    let out_path = format!(r#"O:\spt_{}_{}.dat"#, node.id, range);
    let buf_writer = BufWriter::new(File::create(&out_path).unwrap());
    bincode::serialize_into(buf_writer, &sub_spt).unwrap();
    return Ok(());
}
|
|
|
|
|
2022-02-23 21:45:59 +00:00
|
|
|
/// Dijkstra-style search from `start_sys` to `goal_sys` that minimizes a
/// ship-specific cost (ordering defined by `ShipRouteState`'s `Ord` impl),
/// accounting for fuel consumption and refueling stops.
///
/// For each candidate jump, first tries the jump on the current fuel; if
/// that fails, retries on a full tank (counting one refuel); if even a full
/// tank cannot make the jump, the neighbour is unreachable.
///
/// # Errors
/// Fails if no route exists or if a predecessor ID cannot be resolved
/// during path reconstruction.
fn route_ship(
    &mut self,
    start_sys: &System,
    goal_sys: &System,
    ship: &Ship,
    mode: &ShipMode,
) -> Result<Vec<System>, String> {
    let t_start = Instant::now();
    let mut found = false;
    let mut num: usize = 0;
    // Counters for progress logging only.
    let mut skipped: usize = 0;
    let mut unreachable: usize = 0;
    const INF: f32 = std::f32::INFINITY;
    // Priority queue over full route states (fuel, cost, depth, ...).
    let mut queue: BinaryHeap<ShipRouteState> = BinaryHeap::new();
    // Best known cost per system ID.
    let mut best: FxHashMap<u32, f32> = FxHashMap::default();
    // Predecessor map for path reconstruction.
    let mut prev: FxHashMap<u32, u32> = FxHashMap::default();
    let max_range = ship.max_range();
    let start_node = start_sys.to_node();
    let goal_node = goal_sys.to_node();
    // Initial state: full tank, zero cost, at the start system.
    let state = ShipRouteState {
        cost: 0.0,
        fuel: ship.fuel_capacity,
        node: start_node,
        refuels: 0,
        depth: 0,
        dist: 0.0,
        mode: *mode,
    };
    queue.push(state);
    let mut last_new = Instant::now();
    while let Some(state) = queue.pop() {
        if state.node.id == goal_node.id {
            found = true;
            break;
        }
        if num % 100_000 == 0 {
            // Periodic progress report.
            info!(
                "D: ({}, {}) | FC: ({}, {}) | N: {} ({}) | B: {} ({}) | Q: {} | UR: {} | SK: {}",
                state.depth,
                state.dist,
                state.refuels,
                state.cost,
                num,
                prev.len(),
                best.len(),
                humantime::format_duration(last_new.elapsed()),
                queue.len(),
                unreachable,
                skipped
            );
        }
        num += 1;
        // Stale queue entry: a cheaper path to this system was found since.
        let best_cost = *best.get(&state.node.id).unwrap_or(&INF);
        if state.cost > best_cost {
            skipped += 1;
            continue;
        }
        for nb in self.neighbours(&state.node, max_range * state.node.get_mult()) {
            let mut refuels = state.refuels;
            let dist = dist(&nb.pos, &state.node.pos);
            let (fuel_cost, new_fuel) = {
                if let Some(res) =
                    ship.fuel_cost_for_jump(state.fuel, dist, state.node.get_mult())
                {
                    // can jump with current amount of fuel
                    res
                } else if let Some(res) =
                    ship.fuel_cost_for_jump(ship.fuel_capacity, dist, state.node.get_mult())
                {
                    // can jump after refuel
                    refuels += 1;
                    res
                } else {
                    // can't jump
                    unreachable += 1;
                    continue;
                }
            };
            // Standard Dijkstra relaxation on accumulated fuel cost.
            let next_cost = *best.get(&nb.id).unwrap_or(&INF);
            let new_cost = state.cost + fuel_cost;
            if new_cost < next_cost {
                last_new = Instant::now();
                best.insert(nb.id, new_cost);
                prev.insert(nb.id, state.node.id);
                queue.push(ShipRouteState {
                    cost: new_cost,
                    fuel: new_fuel,
                    node: *nb,
                    refuels,
                    depth: state.depth + 1,
                    dist: state.dist + dist,
                    mode: state.mode,
                });
            }
        }
    }
    info!("Took: {}", format_duration(t_start.elapsed()));
    if !found {
        return Err(format!(
            "No route from {} to {} found!",
            start_sys.name, goal_sys.name
        ));
    }
    // todo!("Fxi path reconstruction");
    // Walk the predecessor chain back from the goal, then reverse.
    let mut v: Vec<System> = Vec::new();
    let mut curr_sys = goal_sys.clone();
    loop {
        v.push(curr_sys.clone());
        if curr_sys.id == start_sys.id {
            break;
        }
        match prev.get(&curr_sys.id) {
            Some(sys) => {
                curr_sys = self
                    .get_sys(*sys)?
                    .ok_or(format!("System id {} not found", sys))?
            }
            None => {
                break;
            }
        }
    }
    v.reverse();
    Ok(v)
}
|
|
|
|
|
|
|
|
/// Returns the nearest system whose range multiplier is at least 3.0
/// (the threshold this codebase uses for neutron-boosted jumps), together
/// with its distance, or `None` if no such system exists.
pub fn closest_neutron(&self, node: &TreeNode) -> Option<(TreeNode, f32)> {
    // Iterate outward by squared distance and stop at the first match.
    for (candidate, dist2) in self.tree.nearest_neighbor_iter_with_distance_2(&node.pos) {
        if candidate.get_mult() >= 3.0 {
            return Some((*candidate, (dist2 as f32).sqrt()));
        }
    }
    None
}
|
|
|
|
|
2022-06-14 21:00:50 +00:00
|
|
|
pub fn floyd_warshall(&self, range: f32) -> Result<Vec<System>, String> {
|
|
|
|
let mut dist: FxHashMap<u64, usize> = FxHashMap::default();
|
|
|
|
info!("nb...");
|
|
|
|
let total = self.tree.size();
|
|
|
|
for (n, node) in self.tree.iter().enumerate() {
|
|
|
|
if (n % 100_000) == 0 {
|
|
|
|
println!("{}/{}", n, total);
|
|
|
|
}
|
|
|
|
let key = (node.id as u64) << 32;
|
|
|
|
for nb in self.neighbours(node, range) {
|
|
|
|
let key = key | nb.id as u64;
|
|
|
|
dist.entry(key).or_insert(1);
|
|
|
|
}
|
|
|
|
let key = ((node.id as u64) << 32) | node.id as u64;
|
|
|
|
dist.insert(key, 0);
|
|
|
|
}
|
2022-02-23 21:45:59 +00:00
|
|
|
todo!()
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Experimental "incremental broadening" search between two hard-coded
/// endpoints (origin-nearest system to Beagle Point).
///
/// Runs a heuristic-ordered best-first search that records the best depth
/// seen per system, prunes states deeper than the best complete route found
/// so far (`global_best`), and shrinks the queue whenever a shorter route
/// to the goal is discovered. Ends in `todo!()` — path reconstruction is
/// not implemented, so this never returns a route.
///
/// NOTE(review): `range` is the only caller-controlled input; endpoints are
/// fixed for experimentation.
pub fn route_incremental_broadening(&self, range: f32) -> Result<Vec<System>, String> {
    /*
    h = (dist(node,goal)-(range*node.mult)).max(0.0) // remaining distance after jumping from here
    */
    let src = self.tree.nearest_neighbor(&[0.0, 0.0, 0.0]).unwrap();
    // let mut route_log = BufWriter::new(File::create("route_log_ib.txt").map_err(|e| e.to_string())?);
    let goal = self
        .tree
        .nearest_neighbor(&[-1111.5625, -134.21875, 65269.75]) // Beagle Point
        // .nearest_neighbor(&[-9530.5, -910.28125, 19808.125]) // Colonia
        .unwrap();
    // Best depth at which each system has been reached so far.
    let mut best_node = FxHashMap::default();
    // let mut prev = FxHashMap::default();
    let mut queue = MinFHeap::new();
    let t_start = Instant::now();
    let mut n = 0usize;
    let mut skipped = 0usize;
    // Depth of the best complete route found so far (pruning bound).
    let mut global_best = u32::MAX;
    queue.push(heuristic(range, src, goal), (0, src));
    loop {
        println!("Q: {}", queue.len());
        if queue.is_empty() {
            warn!(
                "Visited: {} | Skipped: {} | search space exhausted after {}",
                n,
                skipped,
                humantime::format_duration(t_start.elapsed())
            );
            break;
        }
        while let Some((_, (depth, node))) = queue.pop() {
            let best_len = best_node.len();
            let best_depth = best_node.entry(node.id).or_insert(depth);
            // Prune states that can no longer beat the best known route.
            if *best_depth > global_best {
                skipped += 1;
                continue;
            }
            // writeln!(route_log,"{}, {}",node.id,depth).map_err(|e| e.to_string())?;
            // route_log.flush().map_err(|e| e.to_string())?;
            if depth < *best_depth {
                *best_depth = depth;
            }
            n += 1;
            if node.id == goal.id {
                if depth < global_best {
                    // New shortest route: tighten the bound and drop all
                    // queued states that are already too deep.
                    global_best = global_best.min(depth);
                    queue.retain(|(_, (d, _))| *d <= global_best);
                    info!(
                        "Queued: {}, Skipped: {}, Seen: {} (Total: {}) | Best: {} | elapsed: {}",
                        queue.len(),
                        skipped,
                        n,
                        best_len,
                        global_best,
                        humantime::format_duration(t_start.elapsed()).to_string()
                    );
                }
                continue;
            } else if n % 10000 == 0 {
                // Periodic progress report.
                info!(
                    "Queued: {}, Skipped: {}, Seen: {} (Total: {}) | Best: {} | elapsed: {}",
                    queue.len(),
                    skipped,
                    n,
                    best_len,
                    global_best,
                    humantime::format_duration(t_start.elapsed()).to_string()
                );
            }
            // Expand neighbours that are valid and improve on their best
            // known depth, ordered by heuristic.
            self.neighbours(node, node.get_mult() * range)
                .filter(|nb| (self.valid(nb.id) || (nb.id == goal.id)))
                .filter(|nb| match best_node.get(&nb.id) {
                    Some(&d) => depth < d,
                    None => true,
                })
                .map(|nb| (heuristic(range, nb, goal), nb))
                .for_each(|(h, nb)| {
                    // prev.insert(nb.id, node.id);
                    queue.push(h, (depth + 1, nb));
                });
        }
    }
    todo!()
}
|
|
|
|
|
|
|
|
#[cfg_attr(feature = "profiling", tracing::instrument)]
/// (Unfinished) Single-source Dijkstra over the jump graph, seeded at the
/// system nearest to the galactic origin, with edge weights equal to the
/// Euclidean jump distance.
///
/// Relaxes the full graph and logs progress, but ends in `todo!()` — no
/// route is ever extracted from `prev`/`best`.
pub fn route_dijkstra(&self, range: f32) -> Result<Vec<System>, String> {
    // TODO: exit condition
    let total = self.tree.size();
    // n = expanded states, s = skipped/stale states (logging only).
    let mut n: usize = 0;
    let mut s: usize = 0;
    const INF: f32 = std::f32::INFINITY;
    // Min-heap keyed by accumulated distance; payload is (depth, node).
    let mut queue: MinFHeap<(usize, TreeNode)> = MinFHeap::new();
    // Predecessor map and best-known cost per system ID.
    let mut prev: FxHashMap<u32, u32> = FxHashMap::default();
    let mut best: FxHashMap<u32, f32> = FxHashMap::default();
    let seed = *self.tree.nearest_neighbor(&[0.0, 0.0, 0.0]).unwrap();
    queue.push(0.0, (0, seed));
    while let Some((d, (depth, node))) = queue.pop() {
        let best_cost = *best.get(&node.id).unwrap_or(&INF);
        // println!("{} {}", node.id, best_cost);
        // Stale entry: a cheaper path to this node was already relaxed.
        if d > best_cost {
            s += 1;
            continue;
        }
        n += 1;
        if n % 100_000 == 0 {
            // Periodic progress report.
            debug!(
                "{}/{} | C: ({}, {}) | Q: {} | S: {} | B: {}",
                n,
                total,
                depth,
                d,
                queue.len(),
                s,
                best.len()
            );
        }
        // Standard Dijkstra relaxation over all in-range neighbours.
        for nb in self.neighbours(&node, node.get_mult() * range) {
            let next_cost = *best.get(&nb.id).unwrap_or(&INF);
            let new_cost = d + dist(&node.pos, &nb.pos);
            if new_cost < next_cost {
                best.insert(nb.id, new_cost);
                prev.insert(nb.id, node.id);
                queue.push(new_cost, (depth + 1, *nb));
            } else {
                s += 1;
            }
        }
    }
    debug!("Prev: {}", prev.len());
    debug!("Best: {}", best.len());
    todo!()
}
|
|
|
|
|
|
|
|
fn neighbor_workers(
|
|
|
|
&mut self,
|
|
|
|
num: usize,
|
|
|
|
range: f32,
|
|
|
|
) -> (
|
|
|
|
Sender<Vec<TreeNode>>,
|
|
|
|
Receiver<Vec<(TreeNode, Vec<TreeNode>)>>,
|
|
|
|
Vec<JoinHandle<()>>,
|
|
|
|
) {
|
|
|
|
let r2 = range * range;
|
|
|
|
let (tx_q, rx_q) = unbounded::<Vec<TreeNode>>();
|
|
|
|
let (tx_r, rx_r) = bounded::<Vec<(TreeNode, Vec<TreeNode>)>>(100);
|
|
|
|
let threads: Vec<JoinHandle<()>> = (0..num)
|
|
|
|
.map(|_| {
|
|
|
|
let tree = Arc::clone(&self.tree);
|
|
|
|
let tx = tx_r.clone();
|
|
|
|
let rx = rx_q.clone();
|
|
|
|
thread::spawn(move || {
|
2022-06-14 21:00:50 +00:00
|
|
|
rx.into_iter().for_each(|nodes| {
|
2022-02-23 21:45:59 +00:00
|
|
|
let mut ret = vec![];
|
|
|
|
for node in nodes {
|
|
|
|
let res: Vec<TreeNode> =
|
|
|
|
tree.locate_within_distance(node.pos, r2).cloned().collect();
|
|
|
|
ret.push((node, res));
|
|
|
|
}
|
|
|
|
tx.send(ret).unwrap();
|
2022-06-14 21:00:50 +00:00
|
|
|
});
|
2022-02-23 21:45:59 +00:00
|
|
|
drop(tx);
|
|
|
|
})
|
|
|
|
})
|
|
|
|
.collect();
|
|
|
|
(tx_q, rx_r, threads)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn precompute_graph(&mut self, range: f32) -> Result<(), String> {
|
|
|
|
// TODO: fix multithreading workpool (?)
|
|
|
|
// TODO: actual route precomputation (?)
|
|
|
|
let total = self.tree.size();
|
|
|
|
let mut cnt: usize = 0;
|
|
|
|
let mut seen: FxHashSet<u32> = FxHashSet::default();
|
|
|
|
let mut queued: FxHashSet<u32> = FxHashSet::default();
|
|
|
|
info!("Total nodes: {}", total);
|
|
|
|
let (tx, rx, threads) = self.neighbor_workers(num_cpus::get(), range);
|
|
|
|
info!("Precomputing graph");
|
|
|
|
info!("Sumbitting jobs");
|
|
|
|
let seed = *self.tree.nearest_neighbor(&[0.0, 0.0, 0.0]).unwrap();
|
|
|
|
seen.insert(seed.id);
|
|
|
|
queued.insert(seed.id);
|
|
|
|
tx.send(vec![seed]).unwrap();
|
|
|
|
info!("Processing...");
|
|
|
|
// println!("RX:{} TX:{} Q:{}", rx.len(), tx.len(), queued.len());
|
|
|
|
while let Ok(res) = rx.recv() {
|
|
|
|
let mut to_send = vec![];
|
|
|
|
for (node, neighbors) in res {
|
|
|
|
cnt += neighbors.len();
|
|
|
|
queued.remove(&node.id);
|
|
|
|
for nb in neighbors {
|
|
|
|
if !seen.insert(nb.id) {
|
|
|
|
queued.insert(nb.id);
|
|
|
|
to_send.push(nb);
|
|
|
|
if to_send.len() > 10_000 {
|
|
|
|
tx.send(to_send).unwrap();
|
|
|
|
to_send = vec![];
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if !to_send.is_empty() {
|
|
|
|
tx.send(to_send).unwrap();
|
|
|
|
}
|
|
|
|
// n+=1;
|
|
|
|
// if n%10000==0 {
|
|
|
|
info!(
|
|
|
|
"{} total, {} count, {} seen, rx: {}, tx: {}, Q: {}",
|
|
|
|
total,
|
|
|
|
cnt,
|
|
|
|
seen.len(),
|
|
|
|
rx.len(),
|
|
|
|
tx.len(),
|
|
|
|
queued.len(),
|
|
|
|
);
|
|
|
|
// };
|
|
|
|
if queued.is_empty() {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
for t in threads {
|
|
|
|
t.join().unwrap();
|
|
|
|
}
|
|
|
|
info!("Done!");
|
|
|
|
todo!("Implement seed parameter");
|
|
|
|
// Ok(())
|
|
|
|
}
|
|
|
|
|
|
|
|
#[cfg_attr(feature = "profiling", tracing::instrument)]
|
|
|
|
pub fn precompute_all(&mut self, range: f32) -> Result<(), String> {
|
2022-06-14 21:00:50 +00:00
|
|
|
use flate2::write::GzEncoder;
|
2022-02-23 21:45:59 +00:00
|
|
|
let fh_nb = File::create(format!(r#"O:\nb_{}.dat"#, range)).unwrap();
|
2022-06-14 21:00:50 +00:00
|
|
|
let mut fh_encoder = BufWriter::new(fh_nb);
|
2022-02-23 21:45:59 +00:00
|
|
|
let mut pos: u64 = 0;
|
2022-06-14 21:00:50 +00:00
|
|
|
let mut n = 0;
|
2022-02-23 21:45:59 +00:00
|
|
|
let total = self.tree.size();
|
2022-06-14 21:00:50 +00:00
|
|
|
// let (tx, rx, threads) = self.neighbor_workers(num_cpus::get(), range);
|
2022-02-23 21:45:59 +00:00
|
|
|
let mut map: FxHashMap<u32, u64> = FxHashMap::default();
|
2022-06-14 21:00:50 +00:00
|
|
|
info!("Precomputing neighbor map...");
|
|
|
|
self.tree.iter().for_each(|node| {
|
|
|
|
let nb = self.neighbours(node, range).map(|nb| nb.id).collect_vec();
|
|
|
|
map.insert(node.id, pos);
|
|
|
|
pos += fh_encoder.write(&bincode::serialize(&nb).unwrap()).unwrap() as u64;
|
|
|
|
if (n % 10000) == 0 {
|
|
|
|
let prc = ((n as f64) / (total as f64)) * 100f64;
|
|
|
|
info!("{}/{} ({:.2}%) done, {} bytes", n, total, prc, pos);
|
|
|
|
}
|
|
|
|
n += 1;
|
|
|
|
});
|
2022-02-23 21:45:59 +00:00
|
|
|
let mut fh_idx = BufWriter::new(File::create(format!(r#"O:\nb_{}.idx"#, range)).unwrap());
|
|
|
|
info!("Writing index map");
|
|
|
|
info!(
|
|
|
|
"Wrote {} bytes",
|
|
|
|
fh_idx.write(&bincode::serialize(&map).unwrap()).unwrap()
|
|
|
|
);
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
|
|
|
fn precompute_to(&mut self, _dst: &System, _range: f32) -> Result<(), String> {
|
2020-03-28 13:53:52 +00:00
|
|
|
// TODO: -> precompute to
|
|
|
|
unimplemented!();
|
|
|
|
}
|
|
|
|
|
2020-06-16 13:38:31 +00:00
|
|
|
fn precompute(&mut self, src: &System, range: f32) -> Result<(), String> {
|
2020-03-28 13:53:52 +00:00
|
|
|
// TODO: -> precompute from
|
2020-02-05 23:24:24 +00:00
|
|
|
let total = self.tree.size() as f32;
|
2022-02-23 21:45:59 +00:00
|
|
|
let _t_start = Instant::now();
|
|
|
|
let mut prev = FxHashMap::default();
|
|
|
|
let mut seen = FxHashSet::default();
|
2020-02-05 23:24:24 +00:00
|
|
|
let mut depth = 0;
|
|
|
|
let mut queue: VecDeque<(usize, TreeNode)> = VecDeque::new();
|
|
|
|
let mut queue_next: VecDeque<(usize, TreeNode)> = VecDeque::new();
|
|
|
|
queue.push_front((0, src.to_node()));
|
|
|
|
seen.insert(src.id);
|
|
|
|
while !queue.is_empty() {
|
2022-02-23 21:45:59 +00:00
|
|
|
info!(
|
|
|
|
"Depth: {}, Queue: {}, Seen: {} ({:.02}%) \r",
|
2020-02-05 23:24:24 +00:00
|
|
|
depth,
|
|
|
|
queue.len(),
|
|
|
|
seen.len(),
|
|
|
|
((seen.len() * 100) as f32) / total
|
|
|
|
);
|
|
|
|
std::io::stdout().flush().unwrap();
|
|
|
|
while let Some((d, sys)) = queue.pop_front() {
|
|
|
|
queue_next.extend(
|
2022-02-23 21:45:59 +00:00
|
|
|
self.neighbours(&sys, sys.get_mult() * range)
|
2020-02-05 23:24:24 +00:00
|
|
|
// .filter(|&nb| self.valid(nb))
|
|
|
|
.filter(|&nb| seen.insert(nb.id))
|
|
|
|
.map(|nb| {
|
|
|
|
prev.insert(nb.id, sys.id);
|
2020-06-16 13:38:31 +00:00
|
|
|
(d + 1, *nb)
|
2020-02-05 23:24:24 +00:00
|
|
|
}),
|
|
|
|
);
|
|
|
|
}
|
|
|
|
std::mem::swap(&mut queue, &mut queue_next);
|
|
|
|
depth += 1;
|
|
|
|
}
|
|
|
|
self.route_tree = Some(prev);
|
2020-03-28 13:53:52 +00:00
|
|
|
let file_hash = hash_file(&self.path);
|
|
|
|
let file_hash_hex = file_hash
|
|
|
|
.iter()
|
|
|
|
.map(|v| format!("{:02x}", v))
|
|
|
|
.collect::<Vec<String>>()
|
|
|
|
.join("");
|
2020-02-05 23:24:24 +00:00
|
|
|
let ofn = format!(
|
2020-03-28 13:53:52 +00:00
|
|
|
"{}_{}_{}.router",
|
2022-02-23 21:45:59 +00:00
|
|
|
src.name.replace('*', "").replace(' ', "_"),
|
2020-03-28 13:53:52 +00:00
|
|
|
range,
|
|
|
|
file_hash_hex
|
2020-02-05 23:24:24 +00:00
|
|
|
);
|
|
|
|
let mut out_fh = BufWriter::new(File::create(&ofn).unwrap());
|
|
|
|
let data = (
|
2020-03-28 13:53:52 +00:00
|
|
|
self.tree.size(),
|
|
|
|
range,
|
|
|
|
file_hash,
|
2020-02-05 23:24:24 +00:00
|
|
|
self.path.clone(),
|
|
|
|
self.route_tree.as_ref().unwrap(),
|
|
|
|
);
|
2022-02-23 21:45:59 +00:00
|
|
|
info!("Done!");
|
2020-02-05 23:24:24 +00:00
|
|
|
match bincode::serialize_into(&mut out_fh, &data) {
|
|
|
|
Ok(_) => Ok(()),
|
2020-06-16 13:38:31 +00:00
|
|
|
Err(e) => Err(format!("Error: {}", e)),
|
2020-02-05 23:24:24 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
fn get_sys(&self, id: u32) -> Result<Option<System>, String> {
|
|
|
|
let path = &self.path;
|
|
|
|
if let Some(c) = &self.cache {
|
2022-02-23 21:45:59 +00:00
|
|
|
if let Some(sys) = c.lock().unwrap().get(id)? {
|
2020-06-16 13:38:31 +00:00
|
|
|
return Ok(Some(sys));
|
|
|
|
};
|
2020-02-05 23:24:24 +00:00
|
|
|
}
|
|
|
|
let mut reader = match csv::ReaderBuilder::new().from_path(path) {
|
|
|
|
Ok(reader) => reader,
|
|
|
|
Err(e) => {
|
|
|
|
return Err(format!("Error opening {}: {}", path.display(), e));
|
|
|
|
}
|
|
|
|
};
|
2022-02-23 21:45:59 +00:00
|
|
|
warn!("Running serial search for ID: {:?}", id);
|
2020-02-05 23:24:24 +00:00
|
|
|
return Ok(reader
|
2022-02-23 21:45:59 +00:00
|
|
|
.deserialize::<System>()
|
2020-02-05 23:24:24 +00:00
|
|
|
.map(|res| res.unwrap())
|
|
|
|
.filter(|sys| sys.id == id)
|
|
|
|
.last());
|
|
|
|
}
|
|
|
|
|
2022-02-23 21:45:59 +00:00
|
|
|
fn get_systems_by_ids(&self, ids: &[u32]) -> Result<FxHashMap<u32, System>, String> {
|
2020-02-05 23:24:24 +00:00
|
|
|
let path = &self.path;
|
2022-02-23 21:45:59 +00:00
|
|
|
let mut ret = FxHashMap::default();
|
2020-02-05 23:24:24 +00:00
|
|
|
if let Some(c) = &self.cache {
|
|
|
|
let mut c = c.lock().unwrap();
|
|
|
|
let mut missing = false;
|
|
|
|
for id in ids {
|
2022-02-23 21:45:59 +00:00
|
|
|
match c.get(*id)? {
|
2020-02-05 23:24:24 +00:00
|
|
|
Some(sys) => {
|
|
|
|
ret.insert(*id, sys);
|
|
|
|
}
|
|
|
|
None => {
|
|
|
|
missing = true;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if !missing {
|
|
|
|
return Ok(ret);
|
|
|
|
}
|
|
|
|
}
|
2022-02-23 21:45:59 +00:00
|
|
|
let mut reader = match csv::ReaderBuilder::new().has_headers(false).from_path(path) {
|
2020-02-05 23:24:24 +00:00
|
|
|
Ok(reader) => reader,
|
|
|
|
Err(e) => {
|
|
|
|
return Err(format!("Error opening {}: {}", path.display(), e));
|
|
|
|
}
|
|
|
|
};
|
2022-02-23 21:45:59 +00:00
|
|
|
warn!("Running serial search for IDs: {:?}", ids);
|
2020-02-05 23:24:24 +00:00
|
|
|
reader
|
2022-02-23 21:45:59 +00:00
|
|
|
.deserialize::<System>()
|
2020-02-05 23:24:24 +00:00
|
|
|
.map(|res| res.unwrap())
|
|
|
|
.filter(|sys| ids.contains(&sys.id))
|
2020-06-16 13:38:31 +00:00
|
|
|
.for_each(|sys| {
|
2022-02-23 21:45:59 +00:00
|
|
|
ret.insert(sys.id, sys);
|
2020-06-16 13:38:31 +00:00
|
|
|
});
|
2020-02-05 23:24:24 +00:00
|
|
|
for id in ids {
|
2022-02-23 21:45:59 +00:00
|
|
|
if !ret.contains_key(id) {
|
2020-02-05 23:24:24 +00:00
|
|
|
return Err(format!("ID {} not found", id));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
Ok(ret)
|
|
|
|
}
|
|
|
|
|
2020-06-16 13:38:31 +00:00
|
|
|
fn route_to(&self, dst: &System) -> Result<Vec<System>, String> {
|
2020-03-28 13:53:52 +00:00
|
|
|
if self.route_tree.is_none() {
|
|
|
|
return Err("Can't computer route without a precomputed route-tree".to_owned());
|
|
|
|
}
|
2020-02-05 23:24:24 +00:00
|
|
|
let prev = self.route_tree.as_ref().unwrap();
|
|
|
|
if !prev.contains_key(&dst.id) {
|
2020-06-16 13:38:31 +00:00
|
|
|
return Err(format!("System-ID {} not found", dst.id));
|
2020-02-05 23:24:24 +00:00
|
|
|
};
|
|
|
|
let mut v_ids: Vec<u32> = Vec::new();
|
|
|
|
let mut v: Vec<System> = Vec::new();
|
|
|
|
let mut curr_sys: u32 = dst.id;
|
|
|
|
loop {
|
|
|
|
v_ids.push(curr_sys);
|
|
|
|
match prev.get(&curr_sys) {
|
|
|
|
Some(sys_id) => curr_sys = *sys_id,
|
|
|
|
None => {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
v_ids.reverse();
|
|
|
|
let id_map = self.get_systems_by_ids(&v_ids)?;
|
|
|
|
for sys_id in v_ids {
|
|
|
|
let sys = match id_map.get(&sys_id) {
|
|
|
|
Some(sys) => sys,
|
|
|
|
None => {
|
|
|
|
return Err(format!("System-ID {} not found!", sys_id));
|
|
|
|
}
|
|
|
|
};
|
|
|
|
v.push(sys.clone())
|
|
|
|
}
|
|
|
|
Ok(v)
|
|
|
|
}
|
|
|
|
|
2022-02-23 21:45:59 +00:00
|
|
|
#[cfg_attr(feature = "profiling", tracing::instrument)]
|
|
|
|
fn route_dfs(
|
|
|
|
&self,
|
|
|
|
start_sys: &System,
|
|
|
|
goal_sys: &System,
|
|
|
|
range: f32,
|
|
|
|
) -> Result<Vec<System>, String> {
|
|
|
|
if start_sys.id == goal_sys.id {
|
|
|
|
return Ok(vec![goal_sys.clone()]);
|
|
|
|
}
|
|
|
|
let mut log_file = BufWriter::new(File::create("route_log.txt").unwrap());
|
|
|
|
let t_start = Instant::now();
|
|
|
|
info!("Running DFS");
|
|
|
|
let src_name = start_sys.name.clone();
|
|
|
|
let dst_name = goal_sys.name.clone();
|
|
|
|
let d_total = dist(&start_sys.pos, &goal_sys.pos);
|
|
|
|
let mut d_rem = d_total;
|
|
|
|
let mut state = SearchState {
|
|
|
|
mode: "DFS".into(),
|
|
|
|
depth: 0,
|
|
|
|
queue_size: 0,
|
|
|
|
d_rem,
|
|
|
|
d_total,
|
|
|
|
prc_done: 0.0,
|
|
|
|
n_seen: 0,
|
|
|
|
prc_seen: 0.0,
|
|
|
|
from: src_name.clone(),
|
|
|
|
to: dst_name.clone(),
|
|
|
|
system: start_sys.name.clone(),
|
|
|
|
};
|
|
|
|
let total = self.tree.size() as f32;
|
|
|
|
let mut prev = FxHashMap::default();
|
|
|
|
let mut seen: FxHashMap<u32, usize> = FxHashMap::default();
|
|
|
|
let mut best_depth = usize::MAX;
|
|
|
|
let found = false;
|
|
|
|
let mut t_last = Instant::now();
|
|
|
|
let mut queue: MinFHeap<(usize, TreeNode)> = MinFHeap::new();
|
|
|
|
// let mut best = (start_sys.distp(goal_sys), start_sys.to_node());
|
|
|
|
|
|
|
|
queue.push(d_total, (0, start_sys.to_node()));
|
|
|
|
seen.insert(start_sys.id, 0);
|
|
|
|
loop {
|
|
|
|
while let Some((_, (depth, node))) = queue.pop() {
|
|
|
|
if depth > best_depth {
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
let dist_goal = node.distp(goal_sys);
|
|
|
|
if dist_goal < d_rem {
|
|
|
|
d_rem = dist_goal;
|
|
|
|
// best = (d_rem, node);
|
|
|
|
};
|
|
|
|
writeln!(log_file, "{},{}", node.id, depth).unwrap();
|
|
|
|
if node.id == goal_sys.id {
|
|
|
|
if depth < best_depth {
|
|
|
|
info!("Goal reached in {} jumps, best: {}", depth, best_depth);
|
|
|
|
}
|
|
|
|
best_depth = best_depth.min(depth);
|
|
|
|
}
|
|
|
|
if t_last.elapsed().as_millis() > STATUS_INVERVAL {
|
|
|
|
state.depth = depth;
|
|
|
|
state.queue_size = queue.len();
|
|
|
|
state.prc_done = ((d_total - d_rem) * 100f32) / d_total;
|
|
|
|
state.d_rem = d_rem;
|
|
|
|
state.n_seen = seen.len();
|
|
|
|
state.prc_seen = ((seen.len() * 100) as f32) / total;
|
|
|
|
state.system = node.get(self)?.unwrap().name.clone();
|
|
|
|
if let Some(cb) = &self.callback {
|
|
|
|
match cb(&state) {
|
|
|
|
Ok(_) => (),
|
|
|
|
Err(e) => {
|
|
|
|
return Err(format!("{:?}", e));
|
|
|
|
}
|
|
|
|
};
|
|
|
|
}
|
|
|
|
t_last = Instant::now();
|
|
|
|
}
|
|
|
|
self.neighbours(&node, node.get_mult() * range)
|
|
|
|
.filter(|nb| (self.valid(nb.id) || (nb.id == goal_sys.id)))
|
|
|
|
.filter(|nb| {
|
|
|
|
let depth = depth + 1;
|
|
|
|
if depth > best_depth {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
let mut better = true;
|
|
|
|
seen.entry(nb.id)
|
|
|
|
.and_modify(|e| {
|
|
|
|
better = depth <= *e;
|
|
|
|
if better {
|
|
|
|
*e = depth;
|
|
|
|
}
|
|
|
|
})
|
|
|
|
.or_insert(depth);
|
|
|
|
return better;
|
|
|
|
})
|
|
|
|
.for_each(|nb| {
|
|
|
|
prev.insert(nb.id, node);
|
|
|
|
queue.push(dist(&nb.pos, &goal_sys.pos), (depth + 1, *nb));
|
|
|
|
});
|
|
|
|
}
|
|
|
|
// let next_len=queue_next.len();
|
|
|
|
// info!("Queue: {} | Depth: {} | best_d_goal: {}",queue_next.len(),depth,best_d_goal);
|
|
|
|
if found {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
if queue.is_empty() {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
info!("Took: {}", format_duration(t_start.elapsed()));
|
|
|
|
if !found {
|
|
|
|
return Err(format!(
|
|
|
|
"No route from {} to {} found, remaining distance: {} Ly",
|
|
|
|
src_name, dst_name, d_rem
|
|
|
|
));
|
|
|
|
}
|
|
|
|
|
|
|
|
let mut v: Vec<System> = Vec::new();
|
|
|
|
let mut curr_sys = goal_sys.clone();
|
|
|
|
loop {
|
|
|
|
v.push(curr_sys.clone());
|
|
|
|
match prev.get(&curr_sys.id) {
|
|
|
|
Some(sys) => curr_sys = sys.get(self)?.unwrap(),
|
|
|
|
None => {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
v.reverse();
|
|
|
|
Ok(v)
|
|
|
|
}
|
|
|
|
|
|
|
|
#[cfg_attr(feature = "profiling", tracing::instrument)]
|
2020-06-16 13:38:31 +00:00
|
|
|
fn route_bfs(
|
2020-03-28 13:53:52 +00:00
|
|
|
&self,
|
|
|
|
start_sys: &System,
|
|
|
|
goal_sys: &System,
|
|
|
|
range: f32,
|
2022-02-23 21:45:59 +00:00
|
|
|
beam_width: &BeamWidth,
|
|
|
|
max_dist: f32,
|
2020-03-28 13:53:52 +00:00
|
|
|
) -> Result<Vec<System>, String> {
|
2020-02-05 23:24:24 +00:00
|
|
|
if self.workers.is_empty() {
|
2022-02-23 21:45:59 +00:00
|
|
|
return self.route_bfs_serial(start_sys, goal_sys, range, max_dist, beam_width);
|
2020-02-05 23:24:24 +00:00
|
|
|
}
|
2022-02-23 21:45:59 +00:00
|
|
|
info!("Running BFS with {} worker(s)", self.workers.num());
|
2020-02-05 23:24:24 +00:00
|
|
|
let t_start = Instant::now();
|
|
|
|
let mut t_last = Instant::now();
|
2022-02-23 21:45:59 +00:00
|
|
|
let mut prev = FxHashMap::default();
|
|
|
|
let mut seen: FxHashMap<u32, f32> = FxHashMap::default();
|
|
|
|
let src_name = start_sys.name.clone();
|
|
|
|
let dst_name = goal_sys.name.clone();
|
|
|
|
|
|
|
|
let goal_node = goal_sys.to_node();
|
|
|
|
let num_workers = self.workers.num();
|
2020-02-05 23:24:24 +00:00
|
|
|
let workers = &self.workers;
|
|
|
|
let wu = WorkUnit {
|
|
|
|
node: start_sys.to_node(),
|
|
|
|
parent_id: None,
|
|
|
|
depth: 0,
|
2020-03-28 13:53:52 +00:00
|
|
|
range,
|
2020-02-05 23:24:24 +00:00
|
|
|
};
|
2020-03-28 13:53:52 +00:00
|
|
|
if wu.node.id == goal_sys.id {
|
2020-02-05 23:24:24 +00:00
|
|
|
return Ok(vec![goal_sys.clone()]);
|
|
|
|
}
|
|
|
|
let mut found = false;
|
|
|
|
let total = self.tree.size() as f32;
|
|
|
|
let d_total = dist(&start_sys.pos, &goal_sys.pos);
|
|
|
|
let mut d_rem = d_total;
|
|
|
|
let mut state = SearchState {
|
2022-02-23 21:45:59 +00:00
|
|
|
mode: format!("BFS_parallel({})", num_workers),
|
2020-02-05 23:24:24 +00:00
|
|
|
depth: 0,
|
|
|
|
queue_size: 0,
|
|
|
|
d_rem,
|
|
|
|
d_total,
|
|
|
|
prc_done: 0.0,
|
|
|
|
n_seen: 0,
|
|
|
|
prc_seen: 0.0,
|
|
|
|
from: src_name.clone(),
|
|
|
|
to: dst_name.clone(),
|
2022-02-23 21:45:59 +00:00
|
|
|
system: start_sys.name.clone(),
|
2020-02-05 23:24:24 +00:00
|
|
|
};
|
2020-06-16 13:38:31 +00:00
|
|
|
seen.insert(wu.node.id, 0.0);
|
2020-02-05 23:24:24 +00:00
|
|
|
workers.send(wu).unwrap();
|
|
|
|
loop {
|
|
|
|
if found {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
let num_seen = seen.len();
|
|
|
|
let mut nbs: Vec<_> = workers
|
2022-02-23 21:45:59 +00:00
|
|
|
.recv()?
|
|
|
|
.iter()
|
2020-06-16 13:38:31 +00:00
|
|
|
.filter(|wu| !found && !seen.contains_key(&wu.node.id))
|
2020-02-05 23:24:24 +00:00
|
|
|
.filter(|wu| wu.parent_id.is_some())
|
2022-02-23 21:45:59 +00:00
|
|
|
.cloned()
|
2020-02-05 23:24:24 +00:00
|
|
|
.collect();
|
2020-03-28 13:53:52 +00:00
|
|
|
if nbs.is_empty() && workers.queue_empty() && seen.len() > 1 {
|
2020-02-05 23:24:24 +00:00
|
|
|
break;
|
|
|
|
}
|
2022-02-23 21:45:59 +00:00
|
|
|
if beam_width.is_set() {
|
|
|
|
let bw = beam_width.compute(nbs.len());
|
|
|
|
nbs.sort_by_key(|v| {
|
|
|
|
return F32(heuristic(range, &v.node, &goal_node));
|
2020-03-28 13:53:52 +00:00
|
|
|
});
|
2022-02-23 21:45:59 +00:00
|
|
|
nbs = nbs.iter().take(bw.max(1)).cloned().collect();
|
2020-03-28 13:53:52 +00:00
|
|
|
}
|
|
|
|
|
2020-02-05 23:24:24 +00:00
|
|
|
while let Some(wu) = nbs.pop() {
|
2022-02-23 21:45:59 +00:00
|
|
|
if t_last.elapsed().as_millis() > STATUS_INVERVAL {
|
|
|
|
let dist = wu.node.distp(goal_sys);
|
|
|
|
if dist < d_rem {
|
|
|
|
d_rem = dist;
|
|
|
|
};
|
|
|
|
state.depth = wu.depth;
|
|
|
|
state.queue_size = workers.queue_size();
|
|
|
|
state.prc_done = ((d_total - d_rem) * 100f32) / d_total;
|
|
|
|
state.d_rem = d_rem;
|
|
|
|
state.n_seen = num_seen;
|
|
|
|
state.prc_seen = ((num_seen * 100) as f32) / total;
|
|
|
|
{
|
|
|
|
let s = self.get(wu.node.id)?.unwrap();
|
|
|
|
state.system = s.name;
|
|
|
|
}
|
|
|
|
if let Some(cb) = &self.callback {
|
|
|
|
match cb(&state) {
|
|
|
|
Ok(_) => (),
|
|
|
|
Err(e) => {
|
|
|
|
return Err(format!("{:?}", e));
|
|
|
|
}
|
|
|
|
};
|
|
|
|
}
|
|
|
|
t_last = Instant::now();
|
|
|
|
}
|
|
|
|
|
2020-02-05 23:24:24 +00:00
|
|
|
if let Some(parent_id) = wu.parent_id {
|
|
|
|
prev.insert(wu.node.id, parent_id);
|
|
|
|
}
|
2020-06-16 13:38:31 +00:00
|
|
|
seen.insert(wu.node.id, 0.0);
|
2020-02-05 23:24:24 +00:00
|
|
|
if wu.node.id == goal_sys.id {
|
|
|
|
found = true;
|
|
|
|
break;
|
|
|
|
}
|
2022-02-23 21:45:59 +00:00
|
|
|
workers.send(wu.clone()).unwrap();
|
2020-02-05 23:24:24 +00:00
|
|
|
}
|
|
|
|
}
|
2022-02-23 21:45:59 +00:00
|
|
|
info!("Took: {}", format_duration(t_start.elapsed()));
|
2020-02-05 23:24:24 +00:00
|
|
|
if !found {
|
|
|
|
return Err(format!("No route from {} to {} found!", src_name, dst_name));
|
|
|
|
}
|
|
|
|
|
|
|
|
let mut v: Vec<System> = Vec::new();
|
|
|
|
let mut curr_sys = goal_sys.clone();
|
|
|
|
loop {
|
|
|
|
v.push(curr_sys.clone());
|
|
|
|
match prev.get(&curr_sys.id) {
|
|
|
|
Some(sys) => {
|
|
|
|
curr_sys = self
|
|
|
|
.get_sys(*sys)?
|
|
|
|
.ok_or(format!("System id {} not found", sys))?
|
|
|
|
}
|
|
|
|
None => {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
v.reverse();
|
|
|
|
Ok(v)
|
|
|
|
}
|
|
|
|
|
2022-02-23 21:45:59 +00:00
|
|
|
#[cfg_attr(feature = "profiling", tracing::instrument)]
|
2020-06-16 13:38:31 +00:00
|
|
|
fn route_bfs_serial(
|
2020-02-05 23:24:24 +00:00
|
|
|
&self,
|
|
|
|
start_sys: &System,
|
|
|
|
goal_sys: &System,
|
2020-03-28 13:53:52 +00:00
|
|
|
range: f32,
|
2022-02-23 21:45:59 +00:00
|
|
|
max_dist: f32,
|
|
|
|
beam_width: &BeamWidth,
|
2020-02-05 23:24:24 +00:00
|
|
|
) -> Result<Vec<System>, String> {
|
2020-03-28 13:53:52 +00:00
|
|
|
if start_sys.id == goal_sys.id {
|
2020-02-05 23:24:24 +00:00
|
|
|
return Ok(vec![goal_sys.clone()]);
|
|
|
|
}
|
2022-02-23 21:45:59 +00:00
|
|
|
// let mut log_file = BufWriter::new(File::create("route_log.txt").unwrap());
|
2020-02-05 23:24:24 +00:00
|
|
|
let t_start = Instant::now();
|
2022-02-23 21:45:59 +00:00
|
|
|
let _max_dist = max_dist * max_dist;
|
|
|
|
info!("Running BFS");
|
|
|
|
let src_name = start_sys.name.clone();
|
|
|
|
let dst_name = goal_sys.name.clone();
|
2020-02-05 23:24:24 +00:00
|
|
|
let d_total = dist(&start_sys.pos, &goal_sys.pos);
|
|
|
|
let mut d_rem = d_total;
|
|
|
|
let mut state = SearchState {
|
2020-03-28 13:53:52 +00:00
|
|
|
mode: "BFS_serial".into(),
|
2020-02-05 23:24:24 +00:00
|
|
|
depth: 0,
|
|
|
|
queue_size: 0,
|
|
|
|
d_rem,
|
|
|
|
d_total,
|
|
|
|
prc_done: 0.0,
|
|
|
|
n_seen: 0,
|
|
|
|
prc_seen: 0.0,
|
|
|
|
from: src_name.clone(),
|
|
|
|
to: dst_name.clone(),
|
2022-02-23 21:45:59 +00:00
|
|
|
system: start_sys.name.clone(),
|
2020-02-05 23:24:24 +00:00
|
|
|
};
|
|
|
|
let total = self.tree.size() as f32;
|
2022-02-23 21:45:59 +00:00
|
|
|
let mut prev = FxHashMap::default();
|
|
|
|
let mut seen: FxHashMap<u32, f32> = FxHashMap::default();
|
|
|
|
seen.reserve(self.tree.size());
|
|
|
|
prev.reserve(self.tree.size());
|
2020-02-05 23:24:24 +00:00
|
|
|
let mut depth = 0;
|
|
|
|
let mut found = false;
|
|
|
|
let mut t_last = Instant::now();
|
2022-02-23 21:45:59 +00:00
|
|
|
let mut queue: VecDeque<&TreeNode> = VecDeque::new();
|
|
|
|
let mut queue_next: VecDeque<&TreeNode> = VecDeque::new();
|
|
|
|
queue.reserve(100_000);
|
|
|
|
queue_next.reserve(100_000);
|
|
|
|
let start_node = start_sys.to_node();
|
|
|
|
let end_node = goal_sys.to_node();
|
|
|
|
let mut best = (start_sys.distp(goal_sys), start_sys.to_node());
|
|
|
|
queue.push_front(&start_node);
|
2020-06-16 13:38:31 +00:00
|
|
|
seen.insert(start_sys.id, 0.0);
|
2020-02-05 23:24:24 +00:00
|
|
|
while !found {
|
|
|
|
while let Some(node) = queue.pop_front() {
|
2022-02-23 21:45:59 +00:00
|
|
|
let _h_curr = heuristic(range, node, &end_node);
|
|
|
|
let dist = node.distp(goal_sys);
|
|
|
|
if dist < d_rem {
|
|
|
|
d_rem = dist;
|
|
|
|
best = (d_rem, *node);
|
|
|
|
};
|
|
|
|
// writeln!(log_file, "{},{}", node.id, depth).unwrap();
|
2020-02-05 23:24:24 +00:00
|
|
|
if node.id == goal_sys.id {
|
|
|
|
queue.clear();
|
|
|
|
found = true;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
if t_last.elapsed().as_millis() > STATUS_INVERVAL {
|
|
|
|
state.depth = depth;
|
|
|
|
state.queue_size = queue.len() + queue_next.len();
|
|
|
|
state.prc_done = ((d_total - d_rem) * 100f32) / d_total;
|
|
|
|
state.d_rem = d_rem;
|
|
|
|
state.n_seen = seen.len();
|
|
|
|
state.prc_seen = ((seen.len() * 100) as f32) / total;
|
2020-03-28 13:53:52 +00:00
|
|
|
if !queue.is_empty() {
|
2022-02-23 21:45:59 +00:00
|
|
|
let s = queue.get(0).unwrap().get(self)?.unwrap();
|
|
|
|
state.system = s.name.clone();
|
|
|
|
}
|
|
|
|
if let Some(cb) = &self.callback {
|
|
|
|
match cb(&state) {
|
|
|
|
Ok(_) => (),
|
|
|
|
Err(e) => {
|
|
|
|
return Err(format!("{:?}", e));
|
|
|
|
}
|
|
|
|
};
|
2020-02-05 23:24:24 +00:00
|
|
|
}
|
|
|
|
t_last = Instant::now();
|
|
|
|
}
|
2022-02-23 21:45:59 +00:00
|
|
|
let jump_range = node.get_mult() * range;
|
2020-02-05 23:24:24 +00:00
|
|
|
let valid_nbs = self
|
2022-02-23 21:45:59 +00:00
|
|
|
.neighbours(node, jump_range)
|
2020-02-05 23:24:24 +00:00
|
|
|
.filter(|nb| (self.valid(nb.id) || (nb.id == goal_sys.id)))
|
2022-02-23 21:45:59 +00:00
|
|
|
.filter(|nb| seen.insert(nb.id, 0.0).is_none());
|
|
|
|
queue_next.extend(valid_nbs.map(|nb| {
|
|
|
|
prev.insert(nb.id, node);
|
|
|
|
nb
|
|
|
|
}));
|
2020-02-05 23:24:24 +00:00
|
|
|
}
|
2022-02-23 21:45:59 +00:00
|
|
|
// let next_len=queue_next.len();
|
|
|
|
// let mut queue_filt = Vec::new();
|
|
|
|
// let best_d_goal = dist(&best.1.pos, &goal_sys.pos);
|
|
|
|
// // info!("Queue: {} | Depth: {} | best_d_goal: {}",queue_next.len(),depth,best_d_goal);
|
|
|
|
// while let Some(v) = queue_next.pop_front() {
|
|
|
|
// let d_current = dist(&v.pos, &goal_sys.pos);
|
|
|
|
// let diff = (d_current - best_d_goal).abs();
|
|
|
|
// // info!("diff: {} | in_range: {}",diff, diff<(range*8.0));
|
|
|
|
// if diff < (range * 32.0) {
|
|
|
|
// queue_filt.push(v);
|
|
|
|
// };
|
|
|
|
// // if dist(&v.pos,&best.1.pos)<(range*2.0) {
|
|
|
|
// // queue_filt.push(v);
|
|
|
|
// // }
|
|
|
|
// }
|
|
|
|
if beam_width.is_set() {
|
|
|
|
let queue_filt = queue_next.make_contiguous();
|
|
|
|
queue_filt.sort_by_key(|v| {
|
|
|
|
return F32(heuristic(range, v, &end_node));
|
2020-03-28 13:53:52 +00:00
|
|
|
});
|
|
|
|
queue.clear();
|
2022-02-23 21:45:59 +00:00
|
|
|
let bw = beam_width.compute(queue_next.len());
|
|
|
|
queue.extend(queue_next.drain(..).take(bw.max(1)));
|
2020-03-28 13:53:52 +00:00
|
|
|
queue_next.clear();
|
|
|
|
} else {
|
2022-02-23 21:45:59 +00:00
|
|
|
queue.clear();
|
|
|
|
queue.append(&mut queue_next);
|
2020-03-28 13:53:52 +00:00
|
|
|
}
|
2020-02-05 23:24:24 +00:00
|
|
|
if found {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
if queue.is_empty() {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
depth += 1;
|
|
|
|
}
|
2022-02-23 21:45:59 +00:00
|
|
|
info!("Took: {}", format_duration(t_start.elapsed()));
|
2020-02-05 23:24:24 +00:00
|
|
|
if !found {
|
2022-02-23 21:45:59 +00:00
|
|
|
return Err(format!(
|
|
|
|
"No route from {} to {} found, remaining distance: {} Ly",
|
|
|
|
src_name, dst_name, d_rem
|
|
|
|
));
|
2020-02-05 23:24:24 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
let mut v: Vec<System> = Vec::new();
|
|
|
|
let mut curr_sys = goal_sys.clone();
|
|
|
|
loop {
|
|
|
|
v.push(curr_sys.clone());
|
|
|
|
match prev.get(&curr_sys.id) {
|
2022-02-23 21:45:59 +00:00
|
|
|
Some(sys) => curr_sys = sys.get(self)?.unwrap(),
|
2020-02-05 23:24:24 +00:00
|
|
|
None => {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
v.reverse();
|
|
|
|
Ok(v)
|
|
|
|
}
|
2022-02-23 21:45:59 +00:00
|
|
|
|
|
|
|
#[cfg_attr(feature = "profiling", tracing::instrument)]
|
|
|
|
pub fn route_bidir(
|
|
|
|
&self,
|
|
|
|
start_sys: &System,
|
|
|
|
goal_sys: &System,
|
|
|
|
range: f32,
|
|
|
|
) -> Result<Vec<System>, String> {
|
|
|
|
if start_sys.id == goal_sys.id {
|
|
|
|
return Ok(vec![goal_sys.clone()]);
|
|
|
|
}
|
|
|
|
let mut n: usize = 0;
|
|
|
|
let _log_file = BufWriter::new(File::create("route_log.txt").unwrap());
|
|
|
|
let t_start = Instant::now();
|
|
|
|
info!("Running BiDir");
|
|
|
|
let src_name = start_sys.name.clone();
|
|
|
|
let dst_name = goal_sys.name.clone();
|
|
|
|
let d_total = dist(&start_sys.pos, &goal_sys.pos);
|
|
|
|
let d_rem = d_total;
|
|
|
|
let _state = SearchState {
|
|
|
|
mode: "BiDir".into(),
|
|
|
|
depth: 0,
|
|
|
|
queue_size: 0,
|
|
|
|
d_rem,
|
|
|
|
d_total,
|
|
|
|
prc_done: 0.0,
|
|
|
|
n_seen: 0,
|
|
|
|
prc_seen: 0.0,
|
|
|
|
from: src_name.clone(),
|
|
|
|
to: dst_name.clone(),
|
|
|
|
system: start_sys.name.clone(),
|
|
|
|
};
|
|
|
|
let _total = self.tree.size() as f32;
|
|
|
|
let mut prev = FxHashMap::default();
|
|
|
|
let mut seen_fwd: FxHashSet<u32> = FxHashSet::default();
|
|
|
|
let mut seen_rev: FxHashSet<u32> = FxHashSet::default();
|
|
|
|
let mut t_last = Instant::now();
|
|
|
|
let mut queue: VecDeque<(usize, BiDirNode)> = VecDeque::new();
|
|
|
|
queue.push_back((0, BiDirNode::Forward(start_sys.to_node())));
|
|
|
|
queue.push_back((0, BiDirNode::Backwards(goal_sys.to_node())));
|
|
|
|
seen_fwd.insert(start_sys.id);
|
|
|
|
seen_rev.insert(goal_sys.id);
|
|
|
|
while FxHashSet::is_disjoint(&seen_fwd, &seen_rev) {
|
|
|
|
if queue.is_empty() {
|
|
|
|
return Err(format!(
|
|
|
|
"No route from {} to {} found, remaining distance: {} Ly",
|
|
|
|
src_name, dst_name, d_rem
|
|
|
|
));
|
|
|
|
}
|
|
|
|
while let Some((depth, node)) = queue.pop_front() {
|
|
|
|
if t_last.elapsed().as_millis() > STATUS_INVERVAL {
|
|
|
|
if !FxHashSet::is_disjoint(&seen_fwd, &seen_rev) {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
info!("Q: {}, D: {}", queue.len(), depth);
|
|
|
|
t_last = Instant::now();
|
|
|
|
}
|
|
|
|
n += 1;
|
|
|
|
let next_depth = depth + 1;
|
|
|
|
match node {
|
|
|
|
BiDirNode::Forward(node) => {
|
2022-06-14 21:00:50 +00:00
|
|
|
let nbs =
|
|
|
|
self.neighbours(&node, node.get_mult() * range)
|
|
|
|
.filter_map(|nb| {
|
|
|
|
if !seen_fwd.insert(nb.id) {
|
|
|
|
return None;
|
|
|
|
}
|
|
|
|
prev.insert(nb.id, node.id);
|
|
|
|
Some((next_depth, BiDirNode::Forward(*nb)))
|
|
|
|
});
|
2022-02-23 21:45:59 +00:00
|
|
|
queue.extend(nbs);
|
|
|
|
}
|
|
|
|
BiDirNode::Backwards(node) => {
|
|
|
|
let nbs = self.neighbours_r(&node, range).filter_map(|nb| {
|
|
|
|
if !seen_rev.insert(nb.id) {
|
|
|
|
return None;
|
|
|
|
}
|
|
|
|
prev.insert(node.id, nb.id);
|
|
|
|
Some((next_depth, BiDirNode::Backwards(*nb)))
|
|
|
|
});
|
|
|
|
queue.extend(nbs);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
}
|
|
|
|
}
|
|
|
|
info!("Took: {}", format_duration(t_start.elapsed()));
|
|
|
|
let mut v: Vec<System> = Vec::new();
|
|
|
|
let mut curr_sys = goal_sys.clone().id;
|
|
|
|
v.push(self.get_sys(curr_sys)?.unwrap());
|
|
|
|
while let Some(&prev_sys_id) = prev.get(&curr_sys) {
|
|
|
|
v.push(self.get_sys(prev_sys_id)?.unwrap());
|
|
|
|
curr_sys = prev_sys_id;
|
|
|
|
}
|
|
|
|
v.reverse();
|
|
|
|
Ok(v)
|
|
|
|
}
|
2020-02-05 23:24:24 +00:00
|
|
|
}
|
|
|
|
|
2020-03-28 13:53:52 +00:00
|
|
|
impl Router {
|
2022-02-23 21:45:59 +00:00
|
|
|
#[cfg_attr(feature = "profiling", tracing::instrument)]
|
2020-06-16 13:38:31 +00:00
|
|
|
pub fn compute_route(
|
2020-03-28 13:53:52 +00:00
|
|
|
&mut self,
|
|
|
|
sys_ids: &[u32],
|
2020-06-16 13:38:31 +00:00
|
|
|
range: Option<f32>,
|
2022-02-23 21:45:59 +00:00
|
|
|
mode: ModeConfig,
|
2020-03-28 13:53:52 +00:00
|
|
|
num_workers: usize,
|
|
|
|
) -> Result<Vec<System>, String> {
|
2022-02-23 21:45:59 +00:00
|
|
|
let mut has_ship = false;
|
|
|
|
if range.is_none() {
|
|
|
|
if let ModeConfig::Ship { .. } = mode {
|
|
|
|
has_ship = true;
|
|
|
|
} else {
|
|
|
|
return Err(
|
|
|
|
"Need either a jump range or a ship to compute a route with!".to_owned(),
|
|
|
|
);
|
|
|
|
}
|
2020-06-16 13:38:31 +00:00
|
|
|
}
|
2022-02-23 21:45:59 +00:00
|
|
|
let range = if has_ship {
|
|
|
|
0.0
|
|
|
|
} else {
|
|
|
|
range.ok_or("Dynamic range calculation is not yet implemented, sorry!")?
|
|
|
|
};
|
2020-03-28 13:53:52 +00:00
|
|
|
let id_map = self.get_systems_by_ids(sys_ids)?;
|
|
|
|
let hops: Vec<System> = sys_ids
|
|
|
|
.iter()
|
2022-02-23 21:45:59 +00:00
|
|
|
.map(|id| id_map.get(id).unwrap())
|
2020-03-28 13:53:52 +00:00
|
|
|
.cloned()
|
|
|
|
.collect();
|
2022-02-23 21:45:59 +00:00
|
|
|
self.multiroute(&hops, range, mode, num_workers)
|
2020-02-05 23:24:24 +00:00
|
|
|
}
|
|
|
|
}
|