2022-06-14
commit 652609ca71 (parent dc68cce9ed)
18 changed files with 565 additions and 2144 deletions

.gitignore (vendored): 3 changes
							|  | @ -1,5 +1,6 @@ | |||
| rust/target | ||||
| rust/.history/ | ||||
| rust/Cargo.lock | ||||
| **/*.rs.bk | ||||
| *.tmp | ||||
| *.idx | ||||
|  | @ -20,7 +21,7 @@ pip-wheel-metadata | |||
| .eggs/ | ||||
| dist/ | ||||
| installer/Output/ | ||||
| workspace.code-workspace   | ||||
| workspace.code-workspace | ||||
| ed_lrr_gui/web/jobs.db | ||||
| ed_lrr_gui/web/ed_lrr_web_ui.db | ||||
| __version__.py | ||||
|  |  | |||
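
MANIFEST.in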
|  | @ -1,4 +1,11 @@ | |||
| include rust/Cargo.toml | ||||
| include rust/.cargo/config | ||||
| exclude docs_mdbook | ||||
| exclude celery_test | ||||
| exclude installer | ||||
| exclude imgui_test | ||||
| exclude icon | ||||
| recursive-include rust/src * | ||||
| recursive-include ed_lrr_gui * | ||||
| recursive-include ed_lrr_gui * | ||||
| recursive-exclude __pycache__ *.pyc *.pyo | ||||
| global-exclude __pycache__ | ||||
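
pyproject.toml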
|  | @ -1,3 +1,9 @@ | |||
| [build-system] | ||||
| requires = ["setuptools", "wheel","setuptools_rust"] | ||||
| build-backend = "setuptools.build_meta" | ||||
| 
 | ||||
| [tool.poetry] | ||||
| description = "Elite: Dangerous Long Range Route Plotter" | ||||
| name="ed_lrr" | ||||
| version="0.2.0" | ||||
| authors = ["Daniel Seiller <earthnuker@gmail.com>"] | ||||
							
								
								
									
rust/.devcontainer/devcontainer.json (new file): 26 changes
							|  | @ -0,0 +1,26 @@ | |||
| // For format details, see https://aka.ms/devcontainer.json. For config options, see the README at: | ||||
| // https://github.com/microsoft/vscode-dev-containers/tree/v0.238.0/containers/docker-existing-dockerfile | ||||
| { | ||||
| 	"name": "ED_LRR", | ||||
| 
 | ||||
| 	// Sets the run context to one level up instead of the .devcontainer folder. | ||||
| 	"context": "..", | ||||
| 
 | ||||
| 	// Update the 'dockerFile' property if you aren't using the standard 'Dockerfile' filename. | ||||
| 	"dockerFile": "../Dockerfile", | ||||
| 
 | ||||
| 	// Use 'forwardPorts' to make a list of ports inside the container available locally. | ||||
| 	// "forwardPorts": [], | ||||
| 
 | ||||
| 	// Uncomment the next line to run commands after the container is created - for example installing curl. | ||||
| 	// "postCreateCommand": "apt-get update && apt-get install -y curl", | ||||
| 
 | ||||
| 	// Uncomment when using a ptrace-based debugger like C++, Go, and Rust | ||||
| 	"runArgs": [ "--cap-add=SYS_PTRACE", "--security-opt", "seccomp=unconfined" ] | ||||
| 
 | ||||
| 	// Uncomment to use the Docker CLI from inside the container. See https://aka.ms/vscode-remote/samples/docker-from-docker. | ||||
| 	// "mounts": [ "source=/var/run/docker.sock,target=/var/run/docker.sock,type=bind" ], | ||||
| 
 | ||||
| 	// Uncomment to connect as a non-root user if you've added one. See https://aka.ms/vscode-remote/containers/non-root. | ||||
| 	// "remoteUser": "vscode" | ||||
| } | ||||
							
								
								
									
rust/.vscode/settings.json (vendored): 5 changes
							|  | @ -10,10 +10,11 @@ | |||
|     ], | ||||
|     "discord.enabled": true, | ||||
|     "python.pythonPath": "..\\.nox\\devenv-3-8\\python.exe", | ||||
|     "jupyter.jupyterServerType": "remote", | ||||
|     "jupyter.jupyterServerType": "local", | ||||
|     "files.associations": { | ||||
|         "*.ksy": "yaml", | ||||
|         "*.vpy": "python", | ||||
|         "stat.h": "c" | ||||
|     } | ||||
|     }, | ||||
|     "rust-analyzer.diagnostics.disabled": ["unresolved-import"] | ||||
| } | ||||
							
								
								
									
rust/Cargo.lock (generated): 1718 changes
File diff suppressed because it is too large.

rust/Cargo.toml:
							|  | @ -18,59 +18,58 @@ lto = "fat" | |||
| 
 | ||||
| 
 | ||||
| [dependencies] | ||||
| pyo3 = { version = "0.15.1", features = ["extension-module","eyre"] } | ||||
| pyo3 = { version = "0.16.5", features = ["extension-module","eyre","abi3-py37"] } | ||||
| csv = "1.1.6" | ||||
| humantime = "2.1.0" | ||||
| permutohedron = "0.2.4" | ||||
| serde_json = "1.0.74" | ||||
| serde_json = "1.0.81" | ||||
| fnv = "1.0.7" | ||||
| bincode = "1.3.3" | ||||
| sha3 = "0.10.0" | ||||
| sha3 = "0.10.1" | ||||
| byteorder = "1.4.3" | ||||
| rstar = "0.9.2" | ||||
| crossbeam-channel = "0.5.2" | ||||
| rstar =  "0.9.3" | ||||
| crossbeam-channel = "0.5.4" | ||||
| better-panic = "0.3.0" | ||||
| derivative = "2.2.0" | ||||
| dict_derive = "0.4.0" | ||||
| regex = "1.5.4" | ||||
| regex = "1.5.6" | ||||
| num_cpus = "1.13.1" | ||||
| eddie = "0.4.2" | ||||
| thiserror = "1.0.30" | ||||
| pyo3-log = "0.5.0" | ||||
| log = "0.4.14" | ||||
| flate2 = "1.0.22" | ||||
| thiserror = "1.0.31" | ||||
| pyo3-log = "0.6.0" | ||||
| log = "0.4.17" | ||||
| flate2 = "1.0.24" | ||||
| eval = "0.4.3" | ||||
| pythonize = "0.15.0" | ||||
| pythonize = "0.16.0" | ||||
| itertools = "0.10.3" | ||||
| intmap = "0.7.1" | ||||
| diff-struct = "0.4.1" | ||||
| rustc-hash = "1.1.0" | ||||
| stats_alloc = "0.1.8" | ||||
| stats_alloc = "0.1.10" | ||||
| 
 | ||||
| tracing = { version = "0.1.29", optional = true } | ||||
| tracing-subscriber = { version = "0.3.5", optional = true } | ||||
| tracing-tracy = { version = "0.8.0", optional = true } | ||||
| tracing-unwrap = { version = "0.9.2", optional = true } | ||||
| tracy-client = { version = "0.12.6", optional = true } | ||||
| tracing-chrome = "0.4.0" | ||||
| rand = "0.8.4" | ||||
| eyre = "0.6.6" | ||||
| tracing = { version = "0.1.34", optional = true } | ||||
| tracing-subscriber = { version = "0.3.11", optional = true } | ||||
| tracing-tracy = { version = "0.10.0", optional = true } | ||||
| # tracing-unwrap = { version = "0.9.2", optional = true } | ||||
| tracy-client = { version = "0.14.0", optional = true } | ||||
| tracing-chrome = "0.6.0" | ||||
| rand = "0.8.5" | ||||
| eyre = "0.6.8" | ||||
| memmap = "0.7.0" | ||||
| csv-core = "0.1.10" | ||||
| postcard = { version = "0.7.3", features = ["alloc"] } | ||||
| nohash-hasher = "0.2.0" | ||||
| dashmap = "5.3.4" | ||||
| rayon = "1.5.3" | ||||
| 
 | ||||
| 
 | ||||
| [features] | ||||
| profiling = ["tracing","tracing-subscriber","tracing-tracy","tracing-unwrap","tracy-client"] | ||||
| profiling = ["tracing","tracing-subscriber","tracing-tracy","tracy-client"] | ||||
| 
 | ||||
| [dev-dependencies] | ||||
| criterion = { version = "0.3.5", features = ["real_blackbox"] } | ||||
| rand = "0.8.4" | ||||
| rand_distr = "0.4.2" | ||||
| rand = "0.8.5" | ||||
| rand_distr = "0.4.3" | ||||
| 
 | ||||
| [dependencies.serde] | ||||
| version = "1.0.133" | ||||
| version = "1.0.137" | ||||
| features = ["derive"] | ||||
| 
 | ||||
| 
 | ||||
|  |  | |||
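Beyond routine version bumps, the notable dependency change above is pyo3 moving from 0.15.1 to 0.16.5 with the added abi3-py37 feature, which builds the extension against CPython's stable ABI so a single wheel should cover Python 3.7 and newer. A minimal sketch of a pyo3 0.16 module built with that feature set (module and function names here are illustrative, not part of this commit):

use pyo3::prelude::*;

// Hypothetical helper, only to show the pyo3 0.16 module shape.
#[pyfunction]
fn jump_cost(range: f32, dist: f32, mult: f32) -> f32 {
    (dist - range * mult).max(0.0)
}

// With `abi3-py37` enabled in Cargo.toml, the built extension targets the
// stable ABI; nothing changes in the module definition itself.
#[pymodule]
fn _demo(_py: Python<'_>, m: &PyModule) -> PyResult<()> {
    m.add_function(wrap_pyfunction!(jump_cost, m)?)?;
    Ok(())
}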
							
								
								
									
rust/Dockerfile (new file): 5 changes
							|  | @ -0,0 +1,5 @@ | |||
| FROM ghcr.io/pyo3/maturin | ||||
| 
 | ||||
| LABEL ed_lrr_dev latest | ||||
| RUN rustup default nightly | ||||
| RUN pip install maturin[zig] | ||||
							
								
								
									
rust/clippy.toml (new file): 1 change
							|  | @ -0,0 +1 @@ | |||
| disallowed-types = ["std::collections::HashMap", "std::collections::HashSet"] | ||||
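The new clippy.toml disallows std::collections::HashMap and HashSet across the crate; together with the rustc-hash and nohash-hasher dependencies already in Cargo.toml, this nudges hot paths toward cheaper, non-cryptographic hashers. A small illustration of the intended replacement pattern (not code from this commit):

use rustc_hash::{FxHashMap, FxHashSet};

// Count how often each system id occurs; FxHashMap::default() stands in for
// HashMap::new() and avoids the disallowed std type.
fn count_ids(ids: &[u32]) -> FxHashMap<u32, usize> {
    let mut counts = FxHashMap::default();
    for &id in ids {
        *counts.entry(id).or_insert(0) += 1;
    }
    counts
}

// Deduplicate ids the same way with FxHashSet.
fn unique_ids(ids: &[u32]) -> FxHashSet<u32> {
    ids.iter().copied().collect()
}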
							
								
								
									
rust/docker-compose.yml (new file): 8 changes
							|  | @ -0,0 +1,8 @@ | |||
| version: "4.0" | ||||
| services: | ||||
|   ed_lrr_build: | ||||
|     build: . | ||||
|     working_dir: /code | ||||
|     command: ["build","-r","--zig", "--compatibility","manylinux2010"] | ||||
|     volumes: | ||||
|       - .:/code | ||||
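
rust/run_test.py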
|  | @ -6,32 +6,46 @@ import shutil | |||
| import json | ||||
| 
 | ||||
| 
 | ||||
| def setup_logging(loglevel="INFO"): | ||||
| def setup_logging(loglevel="INFO", file=False): | ||||
|     import logging | ||||
|     import coloredlogs | ||||
| 
 | ||||
|     coloredlogs.DEFAULT_FIELD_STYLES["delta"] = {"color": "green"} | ||||
|     coloredlogs.DEFAULT_FIELD_STYLES["levelname"] = {"color": "yellow"} | ||||
|     logfmt = " | ".join( | ||||
|         ["[%(delta)s] %(levelname)s", "%(name)s:%(pathname)s:%(lineno)s", "%(message)s"] | ||||
|     ) | ||||
| 
 | ||||
|     class DeltaTimeFormatter(coloredlogs.ColoredFormatter): | ||||
|     class ColorDeltaTimeFormatter(coloredlogs.ColoredFormatter): | ||||
|         def format(self, record): | ||||
|             seconds = record.relativeCreated / 1000 | ||||
|             duration = timedelta(seconds=seconds) | ||||
|             record.delta = str(duration) | ||||
|             return super().format(record) | ||||
| 
 | ||||
|     coloredlogs.ColoredFormatter = DeltaTimeFormatter | ||||
|     logfmt = " | ".join( | ||||
|         ["[%(delta)s] %(levelname)s", "%(name)s:%(pathname)s:%(lineno)s", "%(message)s"] | ||||
|     ) | ||||
|     class DeltaTimeFormatter(coloredlogs.BasicFormatter): | ||||
|         def format(self, record): | ||||
|             seconds = record.relativeCreated / 1000 | ||||
|             duration = timedelta(seconds=seconds) | ||||
|             record.delta = str(duration) | ||||
|             return super().format(record) | ||||
| 
 | ||||
|     logger = logging.getLogger() | ||||
|     if file: | ||||
|         open("ed_lrr_test.log", "w").close() | ||||
|         fh = logging.FileHandler("ed_lrr_test.log") | ||||
|         fh.setLevel(logging.DEBUG) | ||||
|         fh.setFormatter(DeltaTimeFormatter(logfmt)) | ||||
|         logger.addHandler(fh) | ||||
|     coloredlogs.DEFAULT_FIELD_STYLES["delta"] = {"color": "green"} | ||||
|     coloredlogs.DEFAULT_FIELD_STYLES["levelname"] = {"color": "yellow"} | ||||
| 
 | ||||
|     coloredlogs.ColoredFormatter = ColorDeltaTimeFormatter | ||||
|     numeric_level = getattr(logging, loglevel.upper(), None) | ||||
|     if not isinstance(numeric_level, int): | ||||
|         raise ValueError("Invalid log level: %s" % loglevel) | ||||
|     coloredlogs.install(level=numeric_level, fmt=logfmt) | ||||
|     coloredlogs.install(level=numeric_level, fmt=logfmt, logger=logger) | ||||
|     return | ||||
| 
 | ||||
| 
 | ||||
| setup_logging() | ||||
| 
 | ||||
| JUMP_RANGE = 48 | ||||
| globals().setdefault("__file__", r"D:\devel\rust\ed_lrr_gui\rust\run_test.py") | ||||
| dirname = os.path.dirname(__file__) or "." | ||||
|  | @ -39,28 +53,38 @@ os.chdir(dirname) | |||
| t_start = datetime.now() | ||||
| os.environ["PYO3_PYTHON"] = sys.executable | ||||
| if "--clean" in sys.argv[1:]: | ||||
|     SP.check_call(["cargo","clean"]) | ||||
|     SP.check_call(["cargo", "clean"]) | ||||
| if "--build" in sys.argv[1:]: | ||||
|     SP.check_call(["cargo","lcheck"]) | ||||
|     SP.check_call(["cargo", "lcheck"]) | ||||
|     SP.check_call([sys.executable, "-m", "pip", "install", "-e", ".."]) | ||||
|     print("Build+Install took:", datetime.now() - t_start) | ||||
| 
 | ||||
| sys.path.append("..") | ||||
| setup_logging(file=True) | ||||
| _ed_lrr = __import__("_ed_lrr") | ||||
| 
 | ||||
| 
 | ||||
| def callback(state): | ||||
|     print(state) | ||||
| 
 | ||||
| 
 | ||||
| print(_ed_lrr) | ||||
| r = _ed_lrr.PyRouter(callback) | ||||
| r.load("../stars_2.csv", immediate=False) | ||||
| print(r) | ||||
| r.str_tree_test() | ||||
| 
 | ||||
| exit() | ||||
| 
 | ||||
| r = _ed_lrr.PyRouter(callback) | ||||
| r.load("../stars.csv", immediate=False) | ||||
| print(r.resolve("Sol","Saggitarius A","Colonia","Merope")) | ||||
| # r.nb_perf_test(JUMP_RANGE) | ||||
| # exit() | ||||
| # start, end = "Sol", "Colonia" | ||||
| # systems = r.resolve(start, end) | ||||
| # sys_ids = {k: v["id"] for k, v in systems.items()} | ||||
| 
 | ||||
| r.bfs_test(JUMP_RANGE) | ||||
| 
 | ||||
| # cfg = {} | ||||
| # cfg["mode"] = "incremental_broadening" | ||||
| # # input("{}>".format(os.getpid())) | ||||
| # # route = r.precompute_neighbors(JUMP_RANGE) | ||||
| # route = r.route([sys_ids[start], sys_ids[end]], JUMP_RANGE, cfg, 0) | ||||
| # print("Optimal route:", len(route)) | ||||
| 
 | ||||
| exit() | ||||
| 
 | ||||
|  | @ -68,10 +92,12 @@ ships = _ed_lrr.PyShip.from_journal() | |||
| r = _ed_lrr.PyRouter(callback) | ||||
| r.load("../stars.csv", immediate=False) | ||||
| 
 | ||||
| def func(*args,**kwargs): | ||||
| 
 | ||||
| def func(*args, **kwargs): | ||||
|     print(kwargs) | ||||
|     return 12 | ||||
| 
 | ||||
| 
 | ||||
| r.precompute_neighbors(JUMP_RANGE) | ||||
| 
 | ||||
| exit() | ||||
|  | @ -119,6 +145,7 @@ start, end = "Sol", "Colonia" | |||
| systems = r.resolve(start, end) | ||||
| sys_ids = {k: v["id"] for k, v in systems.items()} | ||||
| 
 | ||||
| 
 | ||||
| cfg = {} | ||||
| cfg["mode"] = "incremental_broadening" | ||||
| # input("{}>".format(os.getpid())) | ||||
|  |  | |||
							
								
								
									
rust/rust-toolchain.toml (new file): 2 changes
							|  | @ -0,0 +1,2 @@ | |||
| [toolchain] | ||||
| channel = "nightly" | ||||
|  | @ -1,11 +1,12 @@ | |||
| //! # Common utility functions
 | ||||
| use crate::route::{LineCache, Router}; | ||||
| use crate::route::Router; | ||||
| use bincode::Options; | ||||
| use crossbeam_channel::{bounded, Receiver}; | ||||
| use csv::ByteRecord; | ||||
| use dict_derive::IntoPyObject; | ||||
| use eyre::Result; | ||||
| use log::*; | ||||
| use nohash_hasher::NoHashHasher; | ||||
| use pyo3::prelude::*; | ||||
| use pyo3::types::PyDict; | ||||
| use pyo3::{conversion::ToPyObject, create_exception}; | ||||
|  | @ -13,8 +14,7 @@ use pythonize::depythonize; | |||
| use serde::{Deserialize, Serialize}; | ||||
| use sha3::{Digest, Sha3_256}; | ||||
| use std::collections::{BTreeMap, HashMap, HashSet, VecDeque}; | ||||
| use std::hash::{Hash, Hasher, BuildHasherDefault}; | ||||
| use std::io::Write; | ||||
| use std::hash::{BuildHasherDefault, Hash, Hasher}; | ||||
| use std::ops::{Deref, DerefMut}; | ||||
| use std::path::Path; | ||||
| use std::str::FromStr; | ||||
|  | @ -25,22 +25,21 @@ use std::{ | |||
|     io::{BufReader, BufWriter}, | ||||
|     path::PathBuf, | ||||
| }; | ||||
| use nohash_hasher::NoHashHasher; | ||||
| use thiserror::Error; | ||||
| 
 | ||||
| #[inline(always)] | ||||
| pub fn heuristic(range: f32, node: &TreeNode, goal: &TreeNode) -> f32 { | ||||
|     // distance remaining after jumping from node towards goal
 | ||||
|     let a2 = dist2(&node.pos, &goal.pos); | ||||
|     let mult=node.get_mult(); | ||||
|     let b2 = range * range * mult*mult; | ||||
|     let a2 = dist(&node.pos, &goal.pos); | ||||
|     let mult = node.get_mult(); | ||||
|     let b2 = range * mult; | ||||
|     return (a2 - b2).max(0.0); | ||||
| } | ||||
| 
 | ||||
| /// Min-heap priority queue using f32 as priority
 | ||||
| pub struct MinFHeap<T: Ord>(pub BinaryHeap<(Reverse<F32>, T)>); | ||||
| pub struct MinFHeap<T: Ord>(BinaryHeap<(Reverse<F32>, T)>); | ||||
| /// Max-heap priority queue using f32 as priority
 | ||||
| pub struct MaxFHeap<T: Ord>(pub BinaryHeap<(F32, T)>); | ||||
| pub struct MaxFHeap<T: Ord>(BinaryHeap<(F32, T)>); | ||||
| 
 | ||||
| impl<T: Ord> MaxFHeap<T> { | ||||
|     /// Create new, empty priority queue
 | ||||
|  | @ -391,7 +390,7 @@ pub enum SysEntry { | |||
| } | ||||
| 
 | ||||
| impl ToPyObject for SysEntry { | ||||
|     fn to_object(&self, py: Python) -> PyObject { | ||||
|     fn to_object(&self, py: Python<'_>) -> PyObject { | ||||
|         match self { | ||||
|             Self::ID(id) => id.to_object(py), | ||||
|             Self::Name(name) => name.to_object(py), | ||||
|  | @ -483,99 +482,6 @@ pub fn ndot(u: &[f32; 3], v: &[f32; 3]) -> f32 { | |||
|     (u[0] * v[0]) / lm + (u[1] * v[1]) / lm + (u[2] * v[2]) / lm | ||||
| } | ||||
| 
 | ||||
| /// Fuzzy string matcher, use to resolve star system names
 | ||||
| #[cfg_attr(feature = "profiling", tracing::instrument(skip(rx)))] | ||||
| fn matcher( | ||||
|     rx: Receiver<ByteRecord>, | ||||
|     names: Vec<String>, | ||||
|     exact: bool, | ||||
| ) -> HashMap<String, (f64, Option<u32>)> { | ||||
|     let mut best: HashMap<String, (f64, Option<u32>)> = HashMap::new(); | ||||
|     for name in &names { | ||||
|         best.insert(name.to_string(), (0.0, None)); | ||||
|     } | ||||
|     let names_u8: Vec<(String, _)> = names.iter().map(|n| (n.clone(), n.as_bytes())).collect(); | ||||
|     let sdist = eddie::slice::Levenshtein::new(); | ||||
|     for sys in rx.into_iter() { | ||||
|         for (name, name_b) in &names_u8 { | ||||
|             if let Some(ent) = best.get_mut(name) { | ||||
|                 if (ent.0 - 1.0).abs() < std::f64::EPSILON { | ||||
|                     continue; | ||||
|                 } | ||||
|                 if exact && (&sys[1] == *name_b) { | ||||
|                     let id = std::str::from_utf8(&sys[0]).unwrap().parse().unwrap(); | ||||
|                     *ent = (1.0, Some(id)); | ||||
|                     continue; | ||||
|                 } | ||||
|                 let d = sdist.similarity(&sys[1], name_b); | ||||
|                 if d > ent.0 { | ||||
|                     let id = std::str::from_utf8(&sys[0]).unwrap().parse().unwrap(); | ||||
|                     *ent = (d, Some(id)); | ||||
|                 } | ||||
|             }; | ||||
|         } | ||||
|     } | ||||
|     best | ||||
| } | ||||
| 
 | ||||
| /// Scan through the csv file at `path` and return a hash map
 | ||||
| /// mapping the strings from `names` to a tuple `(score, Option<system_id>)`.
 | ||||
| /// Scoring matching uses the normalized Levenshtein distance where 1.0 is an exact match.
 | ||||
| pub fn find_matches( | ||||
|     path: &Path, | ||||
|     names: Vec<String>, | ||||
|     exact: bool, | ||||
| ) -> Result<HashMap<String, (f64, Option<u32>)>, String> { | ||||
|     let mut best: HashMap<String, (f64, Option<u32>)> = HashMap::new(); | ||||
|     if names.is_empty() { | ||||
|         return Ok(best); | ||||
|     } | ||||
|     for name in &names { | ||||
|         best.insert(name.to_string(), (0.0, None)); | ||||
|     } | ||||
| 
 | ||||
|     let mut workers = Vec::new(); | ||||
|     let ncpus = num_cpus::get(); | ||||
|     let (tx, rx) = bounded(4096 * ncpus); | ||||
|     for _ in 0..ncpus { | ||||
|         let names = names.clone(); | ||||
|         let rx = rx.clone(); | ||||
|         let th = thread::spawn(move || matcher(rx, names, exact)); | ||||
|         workers.push(th); | ||||
|     } | ||||
| 
 | ||||
|     let mut rdr = match csv::ReaderBuilder::new().has_headers(false).from_path(path) { | ||||
|         Ok(rdr) => rdr, | ||||
|         Err(e) => { | ||||
|             return Err(format!("Error opening {}: {}", path.to_str().unwrap(), e)); | ||||
|         } | ||||
|     }; | ||||
|     let t_start = std::time::Instant::now(); | ||||
|     let mut processed: usize = 0; | ||||
|     for record in rdr.byte_records().flat_map(|v| v.ok()) { | ||||
|         tx.send(record).unwrap(); | ||||
|         processed += 1; | ||||
|     } | ||||
|     drop(tx); | ||||
|     while let Some(th) = workers.pop() { | ||||
|         for (name, (score, sys)) in th.join().unwrap().iter() { | ||||
|             best.entry(name.clone()).and_modify(|ent| { | ||||
|                 if score > &ent.0 { | ||||
|                     *ent = (*score, *sys); | ||||
|                 } | ||||
|             }); | ||||
|         } | ||||
|     } | ||||
|     let dt = std::time::Instant::now() - t_start; | ||||
|     info!( | ||||
|         "Searched {} records in {:?}: {} records/second", | ||||
|         processed, | ||||
|         dt, | ||||
|         (processed as f64) / dt.as_secs_f64() | ||||
|     ); | ||||
|     Ok(best) | ||||
| } | ||||
| 
 | ||||
| /// Hash the contents of `path` with sha3 and return the hash as a vector of bytes
 | ||||
| fn hash_file(path: &Path) -> Vec<u8> { | ||||
|     let mut hash_reader = BufReader::new(File::open(path).unwrap()); | ||||
|  | @ -632,7 +538,7 @@ pub struct TreeNode { | |||
| } | ||||
| 
 | ||||
| impl ToPyObject for TreeNode { | ||||
|     fn to_object(&self, py: Python) -> PyObject { | ||||
|     fn to_object(&self, py: Python<'_>) -> PyObject { | ||||
|         pythonize::pythonize(py, self).unwrap() | ||||
|     } | ||||
| } | ||||
|  | @ -647,7 +553,7 @@ impl TreeNode { | |||
|         match self.flags { | ||||
|             0b11 => 4.0, | ||||
|             0b10 => 1.5, | ||||
|             _ => 1.0 | ||||
|             _ => 1.0, | ||||
|         } | ||||
|     } | ||||
| } | ||||
|  | @ -696,22 +602,21 @@ pub struct System { | |||
| 
 | ||||
| impl System { | ||||
|     fn get_flags(&self) -> u8 { | ||||
|         let mut flags=0; | ||||
|         if self.mult==4.0 { | ||||
|             return 0b11 | ||||
|         if self.mult == 4.0 { | ||||
|             return 0b11; | ||||
|         } | ||||
|         if self.mult==1.5 { | ||||
|             return 0b10 | ||||
|         if self.mult == 1.5 { | ||||
|             return 0b10; | ||||
|         } | ||||
|         if self.has_scoopable { | ||||
|             return 0b01 | ||||
|             return 0b01; | ||||
|         } | ||||
|         return 0b00 | ||||
|         return 0b00; | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| impl ToPyObject for System { | ||||
|     fn to_object(&self, py: Python) -> PyObject { | ||||
|     fn to_object(&self, py: Python<'_>) -> PyObject { | ||||
|         let d = PyDict::new(py); | ||||
|         d.set_item("id", self.id).unwrap(); | ||||
|         d.set_item("name", self.name.clone()).unwrap(); | ||||
|  | @ -771,51 +676,55 @@ impl<T> Default for DQueue<T> { | |||
|     } | ||||
| } | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| #[derive(Debug, Default, Serialize, Deserialize)] | ||||
| struct BKTreeNode { | ||||
|     ids: HashSet<u32,BuildHasherDefault<NoHashHasher<u32>>>, | ||||
|     children: HashMap<u8,Self,BuildHasherDefault<NoHashHasher<u8>>> | ||||
|     ids: HashSet<u32, BuildHasherDefault<NoHashHasher<u32>>>, | ||||
|     children: HashMap<u8, Self, BuildHasherDefault<NoHashHasher<u8>>>, | ||||
| } | ||||
| 
 | ||||
| impl BKTreeNode { | ||||
|     fn new(data: &[String], dist: &eddie::str::Levenshtein) -> Self { | ||||
|         let mut tree= Self::default(); | ||||
|         let mut max_depth=0; | ||||
|         (0..data.len()).map(|id| { | ||||
|             max_depth=max_depth.max(tree.insert(data,id as u32, dist,0)); | ||||
|             if (id>0) && (id%100_000 == 0) { | ||||
|                 println!("Inserting ID {}, Max Depth: {}",id,max_depth); | ||||
|             } | ||||
|         }).max(); | ||||
|         println!("Max Depth: {}",max_depth); | ||||
|         let mut tree = Self::default(); | ||||
|         let mut max_depth = 0; | ||||
|         (0..data.len()) | ||||
|             .map(|id| { | ||||
|                 max_depth = max_depth.max(tree.insert(data, id as u32, dist, 0)); | ||||
|                 if (id > 0) && (id % 100_000 == 0) { | ||||
|                     println!("Inserting ID {}, Max Depth: {}", id, max_depth); | ||||
|                 } | ||||
|             }) | ||||
|             .max(); | ||||
|         println!("Max Depth: {}", max_depth); | ||||
|         tree | ||||
|     } | ||||
| 
 | ||||
|     fn from_id(id: u32) -> Self { | ||||
|         let mut ret=Self::default(); | ||||
|         let mut ret = Self::default(); | ||||
|         ret.ids.insert(id); | ||||
|         return ret; | ||||
|     } | ||||
| 
 | ||||
|     fn insert(&mut self, data: &[String],id: u32, dist: &eddie::str::Levenshtein, depth: usize) -> usize { | ||||
|     fn insert( | ||||
|         &mut self, | ||||
|         data: &[String], | ||||
|         id: u32, | ||||
|         dist: &eddie::str::Levenshtein, | ||||
|         depth: usize, | ||||
|     ) -> usize { | ||||
|         if self.is_empty() { | ||||
|             self.ids.insert(id); | ||||
|             return depth; | ||||
|         } | ||||
|         let idx = self.get_id().unwrap() as usize; | ||||
|         let self_key = data.get(idx).unwrap(); | ||||
|         let ins_key = data.get(id as usize).unwrap(); | ||||
|         let dist_key = dist.distance(self_key,ins_key) as u8; | ||||
|         if dist_key==0 { | ||||
|         let dist_key = dist.distance(&data[idx], &data[id as usize]) as u8; | ||||
|         if dist_key == 0 { | ||||
|             self.ids.insert(id); | ||||
|             return depth; | ||||
|         } | ||||
|         if let Some(child) = self.children.get_mut(&dist_key) { | ||||
|             return child.insert(data,id,dist,depth+1); | ||||
|             return child.insert(data, id, dist, depth + 1); | ||||
|         } else { | ||||
|             self.children.insert(dist_key,Self::from_id(id)); | ||||
|             self.children.insert(dist_key, Self::from_id(id)); | ||||
|             return depth; | ||||
|         } | ||||
|     } | ||||
|  | @ -835,13 +744,11 @@ pub struct BKTree { | |||
|     root: BKTreeNode, | ||||
| } | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| impl BKTree { | ||||
|     pub fn new(data: &[String], base_id: u32) -> Self { | ||||
|         let dist = eddie::str::Levenshtein::new(); | ||||
|         let root = BKTreeNode::new(data, &dist); | ||||
|         Self {base_id,root} | ||||
|         Self { base_id, root } | ||||
|     } | ||||
| 
 | ||||
|     pub fn id(&self) -> u32 { | ||||
|  | @ -851,8 +758,8 @@ impl BKTree { | |||
|     pub fn dump(&self, fh: &mut BufWriter<File>) -> EdLrrResult<()> { | ||||
|         let options = bincode::DefaultOptions::new(); | ||||
|         let amt = options.serialized_size(self)?; | ||||
|         println!("Writing {}",amt); | ||||
|         options.serialize_into(fh,self)?; | ||||
|         println!("Writing {}", amt); | ||||
|         options.serialize_into(fh, self)?; | ||||
|         Ok(()) | ||||
|     } | ||||
| 
 | ||||
|  |  | |||
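One functional change in the hunks above, easy to miss among the formatting fixes: heuristic() now subtracts the jump reach (range * mult) from the plain Euclidean distance instead of comparing squared quantities, so its return value is the remaining distance in light years. A standalone restatement of the revised formula (helper names assumed, not copied from the crate):

// Euclidean distance between two positions in Ly.
fn dist(a: &[f32; 3], b: &[f32; 3]) -> f32 {
    ((a[0] - b[0]).powi(2) + (a[1] - b[1]).powi(2) + (a[2] - b[2]).powi(2)).sqrt()
}

// Distance still to cover after one jump of `range * mult` Ly straight
// towards the goal, floored at zero so the A* heuristic stays admissible.
fn heuristic(range: f32, pos: &[f32; 3], goal: &[f32; 3], mult: f32) -> f32 {
    (dist(pos, goal) - range * mult).max(0.0)
}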
							
								
								
									
rust/src/lib.rs: 196 changes
							|  | @ -1,4 +1,6 @@ | |||
| #![feature(binary_heap_retain)] | ||||
| #![allow(dead_code, clippy::needless_return, clippy::too_many_arguments)] | ||||
| #![warn(rust_2018_idioms, rust_2021_compatibility, clippy::disallowed_types)] | ||||
| //! # Elite: Dangerous Long Range Router
 | ||||
| pub mod common; | ||||
| pub mod galaxy; | ||||
|  | @ -10,13 +12,9 @@ pub mod route; | |||
| pub mod search_algos; | ||||
| pub mod ship; | ||||
| 
 | ||||
| use bincode::Options; | ||||
| use csv::{Position, StringRecord}; | ||||
| use eddie::Levenshtein; | ||||
| // =========================
 | ||||
| use stats_alloc::{Region, StatsAlloc, INSTRUMENTED_SYSTEM}; | ||||
| use std::alloc::System as SystemAlloc; | ||||
| use std::cell::RefMut; | ||||
| use std::collections::BTreeMap; | ||||
| use std::io::{BufWriter, Write}; | ||||
| use std::path::Path; | ||||
|  | @ -30,8 +28,7 @@ mod profiling { | |||
|     pub fn init() {} | ||||
| } | ||||
| 
 | ||||
| extern crate derivative; | ||||
| use crate::common::{find_matches, grid_stats, EdLrrError, SysEntry, System}; | ||||
| use crate::common::{grid_stats, EdLrrError, SysEntry, System}; | ||||
| #[cfg(feature = "profiling")] | ||||
| use crate::profiling::*; | ||||
| use crate::route::{Router, SearchState}; | ||||
|  | @ -39,16 +36,14 @@ use crate::ship::Ship; | |||
| use eyre::Result; | ||||
| #[cfg(not(feature = "profiling"))] | ||||
| use log::*; | ||||
| use pyo3::create_exception; | ||||
| use pyo3::exceptions::*; | ||||
| use pyo3::prelude::*; | ||||
| use pyo3::types::{IntoPyDict, PyDict, PyTuple}; | ||||
| use pyo3::{create_exception, PyObjectProtocol}; | ||||
| use route::{LineCache, PyModeConfig}; | ||||
| use std::{ | ||||
|     cell::RefCell, collections::HashMap, convert::TryInto, fs::File, io::BufReader, path::PathBuf, | ||||
| }; | ||||
| use route::PyModeConfig; | ||||
| use std::{collections::HashMap, convert::TryInto, fs::File, path::PathBuf}; | ||||
| 
 | ||||
| #[cfg(feature = "profiling")] | ||||
| #[cfg(feature = "mem_profiling")] | ||||
| #[global_allocator] | ||||
| static GLOBAL: ProfiledAllocator<std::alloc::System> = | ||||
|     ProfiledAllocator::new(std::alloc::System, 1024); | ||||
|  | @ -87,8 +82,6 @@ impl PyRouter { | |||
|             .ok_or_else(|| PyErr::from(EdLrrError::RuntimeError("no stars.csv loaded".to_owned()))) | ||||
|             .map(PathBuf::from) | ||||
|     } | ||||
| 
 | ||||
| 
 | ||||
| } | ||||
| 
 | ||||
| #[pymethods] | ||||
|  | @ -115,25 +108,23 @@ impl PyRouter { | |||
| 
 | ||||
|     #[args(primary_only = "false", immediate = "false")] | ||||
|     #[pyo3(text_signature = "(path, primary_only, /)")] | ||||
|     fn load(&mut self, path: String, py: Python, immediate: bool) -> PyResult<PyObject> { | ||||
|     fn load(&mut self, path: String, py: Python<'_>, immediate: bool) -> PyResult<PyObject> { | ||||
|         self.stars_path = Some(path); | ||||
|         if immediate { | ||||
|             let stars_path = self.check_stars()?; | ||||
|             let route_res = self.router.load(&stars_path); | ||||
|             if let Err(err_msg) = route_res { | ||||
|                 return Err(PyErr::new::<PyValueError, _>(err_msg)); | ||||
|             }; | ||||
|             self.router | ||||
|                 .load(&self.check_stars()?) | ||||
|                 .map_err(PyErr::new::<PyValueError, _>)?; | ||||
|         } | ||||
|         Ok(py.None()) | ||||
|     } | ||||
| 
 | ||||
|     #[pyo3(text_signature = "(/)")] | ||||
|     fn unload(&mut self, py: Python) -> PyObject { | ||||
|     fn unload(&mut self, py: Python<'_>) -> PyObject { | ||||
|         self.router.unload(); | ||||
|         py.None() | ||||
|     } | ||||
| 
 | ||||
|     fn plot(&mut self, py: Python) -> PyResult<PyObject> { | ||||
|     fn plot(&mut self, py: Python<'_>) -> PyResult<PyObject> { | ||||
|         let stars_path = self.check_stars()?; | ||||
|         let route_res = self.router.load(&stars_path); | ||||
|         if let Err(err_msg) = route_res { | ||||
|  | @ -155,7 +146,7 @@ impl PyRouter { | |||
|         Ok(plot_bbox.to_object(py)) | ||||
|     } | ||||
| 
 | ||||
|     fn run_bfs(&mut self, range: f32, py: Python) -> PyResult<PyObject> { | ||||
|     fn run_bfs(&mut self, range: f32, py: Python<'_>) -> PyResult<PyObject> { | ||||
|         let stars_path = self.check_stars()?; | ||||
|         let route_res = self.router.load(&stars_path); | ||||
|         if let Err(err_msg) = route_res { | ||||
|  | @ -167,7 +158,7 @@ impl PyRouter { | |||
|             .map(|_| py.None()) | ||||
|     } | ||||
| 
 | ||||
|     fn precompute_graph(&mut self, range: f32, py: Python) -> PyResult<PyObject> { | ||||
|     fn precompute_graph(&mut self, range: f32, py: Python<'_>) -> PyResult<PyObject> { | ||||
|         let stars_path = self.check_stars()?; | ||||
|         let route_res = self.router.load(&stars_path); | ||||
|         if let Err(err_msg) = route_res { | ||||
|  | @ -179,31 +170,29 @@ impl PyRouter { | |||
|             .map(|_| py.None()) | ||||
|     } | ||||
| 
 | ||||
|     fn nb_perf_test(&mut self, range: f32, py: Python) -> PyResult<PyObject> { | ||||
|     fn nb_perf_test(&mut self, range: f32, py: Python<'_>) -> PyResult<PyObject> { | ||||
|         let stars_path = self.check_stars()?; | ||||
|         let route_res = self.router.load(&stars_path); | ||||
|         if let Err(err_msg) = route_res { | ||||
|             return Err(PyErr::new::<PyValueError, _>(err_msg)); | ||||
|         }; | ||||
|         let mut nbmap = BTreeMap::new(); | ||||
|         let tree = self.router.get_tree(); | ||||
|         let total_nodes = tree.size(); | ||||
|         let mut total_nbs = 0; | ||||
|         for (n, node) in tree.iter().enumerate() { | ||||
|             let nbs = self | ||||
|                 .router | ||||
|                 .neighbours(node, range) | ||||
|                 .map(|nb| nb.id) | ||||
|                 .collect::<Vec<_>>(); | ||||
|             nbmap.insert(node.id, nbs); | ||||
|             total_nbs += self.router.neighbours(node, range).count(); | ||||
|             // nbmap.insert(node.id, nbs);
 | ||||
|             if n % 100_000 == 0 { | ||||
|                 println!("{}/{}", n, total_nodes); | ||||
|                 let avg = total_nbs as f64 / (n + 1) as f64; | ||||
|                 info!("{}/{} {} ({})", n, total_nodes, total_nbs, avg); | ||||
|             } | ||||
|         } | ||||
|         println!("{}", nbmap.len()); | ||||
|         let avg = total_nbs as f64 / total_nodes as f64; | ||||
|         info!("Total: {} ({})", total_nbs, avg); | ||||
|         Ok(py.None()) | ||||
|     } | ||||
| 
 | ||||
|     fn precompute_neighbors(&mut self, range: f32, py: Python) -> PyResult<PyObject> { | ||||
|     fn precompute_neighbors(&mut self, range: f32, py: Python<'_>) -> PyResult<PyObject> { | ||||
|         let stars_path = self.check_stars()?; | ||||
|         let route_res = self.router.load(&stars_path); | ||||
|         if let Err(err_msg) = route_res { | ||||
|  | @ -215,6 +204,36 @@ impl PyRouter { | |||
|             .map(|_| py.None()) | ||||
|     } | ||||
| 
 | ||||
|     fn bfs_test(&mut self, range: f32) -> PyResult<()> { | ||||
|         use rand::prelude::*; | ||||
|         let stars_path = self.check_stars()?; | ||||
|         let route_res = self.router.load(&stars_path); | ||||
|         if let Err(err_msg) = route_res { | ||||
|             return Err(PyErr::new::<PyValueError, _>(err_msg)); | ||||
|         }; | ||||
|         let mut rng = rand::rngs::StdRng::seed_from_u64(0); | ||||
|         let nodes = self.router.get_tree().size(); | ||||
|         loop { | ||||
|             let source = *self | ||||
|                 .router | ||||
|                 .get_tree() | ||||
|                 .iter() | ||||
|                 .nth(rng.gen_range(0..nodes)) | ||||
|                 .unwrap(); | ||||
|             let goal = *self | ||||
|                 .router | ||||
|                 .get_tree() | ||||
|                 .iter() | ||||
|                 .nth(rng.gen_range(0..nodes)) | ||||
|                 .unwrap(); | ||||
|             self.router.bfs_loop_test(range, &source, &goal, 0); | ||||
|             for w in 0..=15 { | ||||
|                 self.router.bfs_loop_test(range, &source, &goal, 2usize.pow(w)); | ||||
|             } | ||||
|         } | ||||
|         Ok(()) | ||||
|     } | ||||
| 
 | ||||
|     #[args(
 | ||||
|         greedyness = "0.5", | ||||
|         max_dist = "0.0", | ||||
|  | @ -238,7 +257,7 @@ impl PyRouter { | |||
|         let ids: Vec<u32> = match resolve(&hops, &self.router.path, true) { | ||||
|             Ok(sytems) => sytems.into_iter().map(|id| id.into_id()).collect(), | ||||
|             Err(err_msg) => { | ||||
|                 return Err(EdLrrError::ResolveError(err_msg).into()); | ||||
|                 return Err(err_msg.into()); | ||||
|             } | ||||
|         }; | ||||
|         let mut is_default = false; | ||||
|  | @ -287,7 +306,7 @@ impl PyRouter { | |||
|         return res; | ||||
|     } | ||||
| 
 | ||||
|     fn perf_test(&self, callback: PyObject, py: Python) -> PyResult<PyObject> { | ||||
|     fn perf_test(&self, callback: PyObject, py: Python<'_>) -> PyResult<PyObject> { | ||||
|         use common::TreeNode; | ||||
|         let node = TreeNode { | ||||
|             pos: [-65.21875, 7.75, -111.03125], | ||||
|  | @ -320,22 +339,34 @@ impl PyRouter { | |||
| 
 | ||||
|     #[args(grid_size = "1.0")] | ||||
|     #[pyo3(text_signature = "(grid_size)")] | ||||
|     fn get_grid(&self, grid_size: f32, py: Python) -> PyResult<PyObject> { | ||||
|     fn get_grid(&self, grid_size: f32, py: Python<'_>) -> PyResult<PyObject> { | ||||
|         let stars_path = self.check_stars()?; | ||||
|         grid_stats(&stars_path, grid_size) | ||||
|             .map(|ret| ret.to_object(py)) | ||||
|             .map_err(PyErr::new::<PyRuntimeError, _>) | ||||
|     } | ||||
| 
 | ||||
|     fn floyd_warshall_test(&mut self, range: f32) -> PyResult<Vec<common::System>> { | ||||
|         let stars_path = self.check_stars()?; | ||||
|         self.router | ||||
|             .load(&stars_path) | ||||
|             .map_err(PyErr::new::<PyValueError, _>)?; | ||||
|         let res = self | ||||
|             .router | ||||
|             .floyd_warshall(range) | ||||
|             .map_err(PyErr::new::<RoutingError, _>)?; | ||||
|         Ok(res) | ||||
|     } | ||||
| 
 | ||||
|     #[args(hops = "*")] | ||||
|     #[pyo3(text_signature = "(sys_1, sys_2, ..., /)")] | ||||
|     fn resolve(&self, hops: Vec<SysEntry>, py: Python) -> PyResult<PyObject> { | ||||
|     fn resolve(&self, hops: Vec<SysEntry>, py: Python<'_>) -> PyResult<PyObject> { | ||||
|         info!("Resolving systems..."); | ||||
|         let stars_path = self.check_stars()?; | ||||
|         let systems: Vec<System> = match resolve(&hops, &stars_path, false) { | ||||
|             Ok(sytems) => sytems.into_iter().map(|sys| sys.into_system()).collect(), | ||||
|             Err(err_msg) => { | ||||
|                 return Err(EdLrrError::ResolveError(err_msg).into()); | ||||
|                 return Err(err_msg.into()); | ||||
|             } | ||||
|         }; | ||||
|         let ret: Vec<(_, System)> = hops | ||||
|  | @ -356,15 +387,15 @@ impl PyRouter { | |||
|             .map_err(EdLrrError::from)?; | ||||
|         let mut data: Vec<String> = Vec::with_capacity(CHUNK_SIZE); | ||||
|         let t_start = Instant::now(); | ||||
|         let mut base_id=0; | ||||
|         let mut base_id = 0; | ||||
|         let mut wr = BufWriter::new(File::create("test.bktree")?); | ||||
|         for sys in reader.into_deserialize::<System>() { | ||||
|             let sys = sys?; | ||||
|             data.push(sys.name); | ||||
|             if data.len()>CHUNK_SIZE { | ||||
|             if data.len() > CHUNK_SIZE { | ||||
|                 let tree = BKTree::new(&data, base_id); | ||||
|                 tree.dump(&mut wr)?; | ||||
|                 base_id=sys.id; | ||||
|                 base_id = sys.id; | ||||
|             } | ||||
|         } | ||||
|         if !data.is_empty() { | ||||
|  | @ -375,10 +406,7 @@ impl PyRouter { | |||
|         println!("Took: {:?}", t_start.elapsed()); | ||||
|         Ok(()) | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| #[pyproto] | ||||
| impl PyObjectProtocol for PyRouter { | ||||
|     fn __str__(&self) -> PyResult<String> { | ||||
|         Ok(format!("{:?}", &self)) | ||||
|     } | ||||
|  | @ -387,7 +415,6 @@ impl PyObjectProtocol for PyRouter { | |||
|         Ok(format!("{:?}", &self)) | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| enum ResolveResult { | ||||
|     System(System), | ||||
|     ID(u32), | ||||
|  | @ -409,7 +436,11 @@ impl ResolveResult { | |||
|     } | ||||
| } | ||||
| 
 | ||||
| fn resolve(entries: &[SysEntry], path: &Path, id_only: bool) -> Result<Vec<ResolveResult>, String> { | ||||
| fn resolve( | ||||
|     entries: &[SysEntry], | ||||
|     path: &Path, | ||||
|     id_only: bool, | ||||
| ) -> Result<Vec<ResolveResult>, EdLrrError> { | ||||
|     let mut names: Vec<String> = Vec::new(); | ||||
|     let mut ret: Vec<u32> = Vec::new(); | ||||
|     let mut needs_rtree = false; | ||||
|  | @ -423,7 +454,10 @@ fn resolve(entries: &[SysEntry], path: &Path, id_only: bool) -> Result<Vec<Resol | |||
|         } | ||||
|     } | ||||
|     if !path.exists() { | ||||
|         return Err(format!("Source file {:?} does not exist!", path.display())); | ||||
|         return Err(EdLrrError::ResolveError(format!( | ||||
|             "Source file {:?} does not exist!", | ||||
|             path.display() | ||||
|         ))); | ||||
|     } | ||||
|     let name_ids = if !names.is_empty() { | ||||
|         mmap_csv::mmap_csv(path, names)? | ||||
|  | @ -439,12 +473,12 @@ fn resolve(entries: &[SysEntry], path: &Path, id_only: bool) -> Result<Vec<Resol | |||
|     for ent in entries { | ||||
|         match ent { | ||||
|             SysEntry::Name(name) => { | ||||
|                 let ent_res = name_ids | ||||
|                     .get(name) | ||||
|                     .ok_or(format!("System {} not found", name))?; | ||||
|                 let sys = ent_res | ||||
|                     .as_ref() | ||||
|                     .ok_or(format!("System {} not found", name))?; | ||||
|                 let ent_res = name_ids.get(name).ok_or_else(|| { | ||||
|                     EdLrrError::ResolveError(format!("System {} not found", name)) | ||||
|                 })?; | ||||
|                 let sys = ent_res.as_ref().ok_or_else(|| { | ||||
|                     EdLrrError::ResolveError(format!("System {} not found", name)) | ||||
|                 })?; | ||||
|                 ret.push(*sys); | ||||
|             } | ||||
|             SysEntry::ID(id) => ret.push(*id), | ||||
|  | @ -453,7 +487,7 @@ fn resolve(entries: &[SysEntry], path: &Path, id_only: bool) -> Result<Vec<Resol | |||
|                     .as_ref() | ||||
|                     .unwrap() | ||||
|                     .closest(&[*x, *y, *z]) | ||||
|                     .ok_or("No systems loaded!")? | ||||
|                     .ok_or_else(|| EdLrrError::ResolveError("No systems loaded!".to_string()))? | ||||
|                     .id, | ||||
|             ), | ||||
|         } | ||||
|  | @ -476,29 +510,17 @@ fn resolve(entries: &[SysEntry], path: &Path, id_only: bool) -> Result<Vec<Resol | |||
| struct PyShip { | ||||
|     ship: Ship, | ||||
| } | ||||
| 
 | ||||
| #[pyproto] | ||||
| impl PyObjectProtocol for PyShip { | ||||
|     fn __str__(&self) -> PyResult<String> { | ||||
|         Ok(format!("{:?}", &self.ship)) | ||||
|     } | ||||
| 
 | ||||
|     fn __repr__(&self) -> PyResult<String> { | ||||
|         Ok(format!("{:?}", &self.ship)) | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| #[pymethods] | ||||
| impl PyShip { | ||||
|     #[staticmethod] | ||||
|     fn from_loadout(py: Python, loadout: &str) -> PyResult<PyObject> { | ||||
|     fn from_loadout(py: Python<'_>, loadout: &str) -> PyResult<PyObject> { | ||||
|         match Ship::new_from_json(loadout) { | ||||
|             Ok(ship) => Ok((PyShip { ship: ship.1 }).into_py(py)), | ||||
|             Err(err_msg) => Err(PyErr::new::<PyValueError, _>(err_msg)), | ||||
|         } | ||||
|     } | ||||
|     #[staticmethod] | ||||
|     fn from_journal(py: Python) -> PyResult<PyObject> { | ||||
|     fn from_journal(py: Python<'_>) -> PyResult<PyObject> { | ||||
|         let mut ship = match Ship::new_from_journal() { | ||||
|             Ok(ship) => ship, | ||||
|             Err(err_msg) => { | ||||
|  | @ -516,38 +538,38 @@ impl PyShip { | |||
|         Ok(PyDict::from_sequence(py, ships.to_object(py))?.to_object(py)) | ||||
|     } | ||||
| 
 | ||||
|     fn to_dict(&self, py: Python) -> PyResult<PyObject> { | ||||
|     fn to_dict(&self, py: Python<'_>) -> PyResult<PyObject> { | ||||
|         self.ship.to_object(py) | ||||
|     } | ||||
| 
 | ||||
|     #[pyo3(text_signature = "(dist, /)")] | ||||
|     fn fuel_cost(&self, _py: Python, dist: f32) -> f32 { | ||||
|     fn fuel_cost(&self, _py: Python<'_>, dist: f32) -> f32 { | ||||
|         self.ship.fuel_cost(dist) | ||||
|     } | ||||
| 
 | ||||
|     #[getter] | ||||
|     fn range(&self, _py: Python) -> f32 { | ||||
|     fn range(&self, _py: Python<'_>) -> f32 { | ||||
|         self.ship.range() | ||||
|     } | ||||
| 
 | ||||
|     #[getter] | ||||
|     fn max_range(&self, _py: Python) -> f32 { | ||||
|     fn max_range(&self, _py: Python<'_>) -> f32 { | ||||
|         self.ship.max_range() | ||||
|     } | ||||
| 
 | ||||
|     #[pyo3(text_signature = "(dist, /)")] | ||||
|     fn make_jump(&mut self, dist: f32, _py: Python) -> Option<f32> { | ||||
|     fn make_jump(&mut self, dist: f32, _py: Python<'_>) -> Option<f32> { | ||||
|         self.ship.make_jump(dist) | ||||
|     } | ||||
| 
 | ||||
|     #[pyo3(text_signature = "(dist, /)")] | ||||
|     fn can_jump(&self, dist: f32, _py: Python) -> bool { | ||||
|     fn can_jump(&self, dist: f32, _py: Python<'_>) -> bool { | ||||
|         self.ship.can_jump(dist) | ||||
|     } | ||||
| 
 | ||||
|     #[args(fuel_amount = "None")] | ||||
|     #[pyo3(text_signature = "(fuel_amount, /)")] | ||||
|     fn refuel(&mut self, fuel_amount: Option<f32>, _py: Python) { | ||||
|     fn refuel(&mut self, fuel_amount: Option<f32>, _py: Python<'_>) { | ||||
|         if let Some(fuel) = fuel_amount { | ||||
|             self.ship.fuel_mass = (self.ship.fuel_mass + fuel).min(self.ship.fuel_capacity) | ||||
|         } else { | ||||
|  | @ -556,9 +578,17 @@ impl PyShip { | |||
|     } | ||||
| 
 | ||||
|     #[pyo3(text_signature = "(factor, /)")] | ||||
|     fn boost(&mut self, factor: f32, _py: Python) { | ||||
|     fn boost(&mut self, factor: f32, _py: Python<'_>) { | ||||
|         self.ship.boost(factor); | ||||
|     } | ||||
| 
 | ||||
|     fn __str__(&self) -> PyResult<String> { | ||||
|         Ok(format!("{:?}", &self.ship)) | ||||
|     } | ||||
| 
 | ||||
|     fn __repr__(&self) -> PyResult<String> { | ||||
|         Ok(format!("{:?}", &self.ship)) | ||||
|     } | ||||
| } | ||||
| 
 | ||||
| impl PyShip { | ||||
|  | @ -572,14 +602,14 @@ fn preprocess_edsm( | |||
|     _bodies_path: &str, | ||||
|     _systems_path: &str, | ||||
|     _out_path: &str, | ||||
|     _py: Python, | ||||
|     _py: Python<'_>, | ||||
| ) -> PyResult<()> { | ||||
|     Err(pyo3::exceptions::PyNotImplementedError::new_err( | ||||
|         "please use Spansh's Galaxy dump and preprocess_galaxy()", | ||||
|     )) | ||||
| } | ||||
| 
 | ||||
| fn to_py_value(value: eval::Value, py: Python) -> PyResult<PyObject> { | ||||
| fn to_py_value(value: eval::Value, py: Python<'_>) -> PyResult<PyObject> { | ||||
|     type Value = eval::Value; | ||||
|     match value { | ||||
|         Value::String(s) => Ok(s.to_object(py)), | ||||
|  | @ -611,14 +641,14 @@ fn to_py_value(value: eval::Value, py: Python) -> PyResult<PyObject> { | |||
|     } | ||||
| } | ||||
| 
 | ||||
| fn to_py(res: Result<eval::Value, eval::Error>, py: Python) -> PyResult<PyObject> { | ||||
| fn to_py(res: Result<eval::Value, eval::Error>, py: Python<'_>) -> PyResult<PyObject> { | ||||
|     res.map_err(|e| PyErr::from(EdLrrError::EvalError(e))) | ||||
|         .and_then(|r| to_py_value(r, py)) | ||||
| } | ||||
| 
 | ||||
| #[pyfunction] | ||||
| #[pyo3(text_signature = "(expr)")] | ||||
| fn expr_test(expr: &str, py: Python) -> PyResult<PyObject> { | ||||
| fn expr_test(expr: &str, py: Python<'_>) -> PyResult<PyObject> { | ||||
|     use eval::{to_value, Expr, Value}; | ||||
|     let mut res = Expr::new(expr) | ||||
|         .compile() | ||||
|  | @ -647,7 +677,7 @@ fn preprocess_galaxy(path: &str, out_path: &str) -> PyResult<()> { | |||
| } | ||||
| 
 | ||||
| #[pymodule] | ||||
| pub fn _ed_lrr(_py: Python, m: &PyModule) -> PyResult<()> { | ||||
| pub fn _ed_lrr(_py: Python<'_>, m: &PyModule) -> PyResult<()> { | ||||
|     better_panic::install(); | ||||
|     pyo3_log::init(); | ||||
|     profiling::init(); | ||||
|  |  | |||
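The new bfs_test driver above picks random start/goal pairs and sweeps beam widths 0 (no pruning) and 2^0 through 2^15, handing each to Router::bfs_loop_test for timing. A rough sketch of the pruned breadth-first search being benchmarked, with simplified types and helper closures that are not part of the crate:

use rustc_hash::FxHashSet;

// Level-by-level BFS; when `beam` is non-zero the next frontier is cut down
// to the `beam` entries with the lowest heuristic score before continuing.
fn bfs_beam(
    start: u32,
    goal: u32,
    neighbours: impl Fn(u32) -> Vec<u32>,
    score: impl Fn(u32) -> f32,
    beam: usize,
) -> bool {
    let mut seen: FxHashSet<u32> = FxHashSet::default();
    seen.insert(start);
    let mut queue = vec![start];
    while !queue.is_empty() {
        // Expand every frontier node, keeping only systems not seen before.
        let mut next: Vec<u32> = queue
            .iter()
            .flat_map(|&id| neighbours(id))
            .filter(|&id| seen.insert(id))
            .collect();
        if seen.contains(&goal) {
            return true;
        }
        if beam != 0 {
            next.sort_by(|&a, &b| score(a).partial_cmp(&score(b)).unwrap());
            next.truncate(beam);
        }
        queue = next;
    }
    false
}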
|  | @ -1,26 +1,62 @@ | |||
| use crate::common::{EdLrrError, EdLrrResult, System}; | ||||
| use crate::info; | ||||
| use crossbeam_channel::bounded; | ||||
| use csv_core::{ReadFieldResult, Reader}; | ||||
| use dashmap::DashMap; | ||||
| use eyre::Result; | ||||
| use itertools::Itertools; | ||||
| use memmap::Mmap; | ||||
| use std::collections::HashMap; | ||||
| use std::fs::File; | ||||
| use std::path::Path; | ||||
| use std::sync::Arc; | ||||
| 
 | ||||
| pub fn mmap_csv(path: &Path, query: Vec<String>) -> Result<HashMap<String, Option<u32>>, String> { | ||||
|     let file = File::open(path).map_err(|e| e.to_string())?; | ||||
|     let mm = unsafe { Mmap::map(&file) }.map_err(|e| e.to_string())?; | ||||
|     let mut best = query | ||||
|         .iter() | ||||
|         .map(|s| (s, (s.as_bytes(), usize::MAX, u32::MAX))) | ||||
|         .collect::<Vec<(&String, (_, usize, u32))>>(); | ||||
|     let t_start = std::time::Instant::now(); | ||||
|     let dist = eddie::slice::DamerauLevenshtein::new(); | ||||
|     let mut row = 0; | ||||
|     { | ||||
|         let mut data = &mm[..]; | ||||
| struct MmapCsv { | ||||
|     mm: Mmap, | ||||
| } | ||||
| 
 | ||||
| impl MmapCsv { | ||||
|     fn new(path: &Path) -> Result<Self> { | ||||
|         let file = File::open(path)?; | ||||
|         let mm = unsafe { Mmap::map(&file) }?; | ||||
|         Ok(Self { mm }) | ||||
|     } | ||||
| 
 | ||||
|     fn search(&self, query: Vec<String>) -> Result<HashMap<String, Option<u32>>, EdLrrError> { | ||||
|         let t_start = std::time::Instant::now(); | ||||
|         let map = Arc::new(DashMap::new()); | ||||
|         let (tx, rx) = bounded(1024); | ||||
|         let query_b = query.iter().map(|s| s.bytes().collect_vec()).collect_vec(); | ||||
|         let mut workers = (0..(num_cpus::get())) | ||||
|             .map(|_| { | ||||
|                 let query_b = query_b.clone(); | ||||
|                 let query = query.clone(); | ||||
|                 let rx = rx.clone(); | ||||
|                 let map = map.clone(); | ||||
|                 std::thread::spawn(move || { | ||||
|                     let dist = eddie::slice::DamerauLevenshtein::new(); | ||||
|                     rx.into_iter() | ||||
|                         // .flatten()
 | ||||
|                         .for_each(|(id, name): (_, Vec<u8>)| { | ||||
|                             for (query, query_b) in query.iter().zip(query_b.iter()) { | ||||
|                                 let d = dist.distance(name.as_slice(), query_b); | ||||
|                                 let mut e = map.entry(query.clone()).or_insert((usize::MAX, None)); | ||||
|                                 if d < e.0 { | ||||
|                                     *e = (d, Some(id)); | ||||
|                                 } | ||||
|                             } | ||||
|                         }); | ||||
|                 }) | ||||
|             }) | ||||
|             .collect_vec(); | ||||
|         drop(rx); | ||||
|         let mut data = &self.mm[..]; | ||||
|         let mut rdr = Reader::new(); | ||||
|         let mut field = [0; 1024]; | ||||
|         let mut fieldidx = 0; | ||||
|         // let mut chunk = vec![];
 | ||||
|         let mut sys_id = 0u32; | ||||
|         let mut row = 0; | ||||
|         loop { | ||||
|             let (result, nread, nwrite) = rdr.read_field(data, &mut field); | ||||
|             data = &data[nread..]; | ||||
|  | @ -28,18 +64,22 @@ pub fn mmap_csv(path: &Path, query: Vec<String>) -> Result<HashMap<String, Optio | |||
|             match result { | ||||
|                 ReadFieldResult::InputEmpty => {} | ||||
|                 ReadFieldResult::OutputFull => { | ||||
|                     return Err("Encountered field larger than 1024 bytes!".to_string()); | ||||
|                     return Err(EdLrrError::ResolveError( | ||||
|                         "Encountered field larger than 1024 bytes!".to_string(), | ||||
|                     )); | ||||
|                 } | ||||
|                 ReadFieldResult::Field { record_end } => { | ||||
|                     if fieldidx == 1 { | ||||
|                         for (_, (name_b, best_dist, id)) in best.iter_mut() { | ||||
|                             let d = dist.distance(name_b, field); | ||||
|                             if d < *best_dist { | ||||
|                                 *best_dist = d; | ||||
|                                 *id = row; | ||||
|                             } | ||||
|                     match fieldidx { | ||||
|                         0 => { | ||||
|                             sys_id = unsafe { std::str::from_utf8_unchecked(field) } | ||||
|                                 .parse::<u32>() | ||||
|                                 .unwrap(); | ||||
|                         } | ||||
|                     } | ||||
|                         1 => tx | ||||
|                             .send((sys_id, field.to_vec())) | ||||
|                             .map_err(|e| EdLrrError::ResolveError(e.to_string()))?, | ||||
|                         _ => (), | ||||
|                     }; | ||||
|                     if record_end { | ||||
|                         fieldidx = 0; | ||||
|                         row += 1; | ||||
|  | @ -54,16 +94,28 @@ pub fn mmap_csv(path: &Path, query: Vec<String>) -> Result<HashMap<String, Optio | |||
|                 } | ||||
|             } | ||||
|         } | ||||
|         drop(tx); | ||||
|         for w in workers.drain(..) { | ||||
|             w.join().unwrap(); | ||||
|         } | ||||
|         let res = Arc::try_unwrap(map) | ||||
|             .unwrap() | ||||
|             .into_iter() | ||||
|             .map(|(k, (_, id))| (k, id)) | ||||
|             .collect::<HashMap<_, _>>(); | ||||
|         let rate = (row as f64) / t_start.elapsed().as_secs_f64(); | ||||
|         info!( | ||||
|             "Took: {:.2?}, {:.2} systems/second", | ||||
|             t_start.elapsed(), | ||||
|             rate | ||||
|         ); | ||||
|         Ok(res) | ||||
|     } | ||||
|     let search_result = best | ||||
|         .drain(..) | ||||
|         .map(|(query_name, (_, _, idx))| (query_name.clone(), Some(idx))) | ||||
|         .collect::<HashMap<String, Option<u32>>>(); | ||||
|     let rate = (row as f64) / t_start.elapsed().as_secs_f64(); | ||||
|     info!( | ||||
|         "Took: {:.2?}, {:.2} systems/second", | ||||
|         t_start.elapsed(), | ||||
|         rate | ||||
|     ); | ||||
|     Ok(search_result) | ||||
| } | ||||
| 
 | ||||
| pub fn mmap_csv( | ||||
|     path: &Path, | ||||
|     query: Vec<String>, | ||||
| ) -> Result<HashMap<String, Option<u32>>, EdLrrError> { | ||||
|     MmapCsv::new(path)?.search(query) | ||||
| } | ||||
|  |  | |||
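mmap_csv keeps its public signature while moving the fuzzy matching onto worker threads fed through a crossbeam channel and a shared DashMap. A hedged usage sketch, assuming the function and EdLrrError are in scope and a stars CSV exists at the given path:

use std::path::Path;

fn example() -> Result<(), EdLrrError> {
    let query = vec!["Sol".to_string(), "Colonia".to_string()];
    // Maps each queried name to the row id of its best fuzzy match.
    let matches = mmap_csv(Path::new("../stars.csv"), query)?;
    for (name, id) in &matches {
        println!("{} -> {:?}", name, id);
    }
    Ok(())
}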
|  | @ -7,6 +7,7 @@ use crate::profiling::{span, Level}; | |||
| use crate::ship::Ship; | ||||
| 
 | ||||
| use crossbeam_channel::{bounded, unbounded, Receiver, SendError, Sender}; | ||||
| use dashmap::{DashMap, DashSet}; | ||||
| use derivative::Derivative; | ||||
| use dict_derive::IntoPyObject; | ||||
| 
 | ||||
|  | @ -18,6 +19,8 @@ use permutohedron::LexicalPermutation; | |||
| 
 | ||||
| use pyo3::prelude::*; | ||||
| use pythonize::depythonize; | ||||
| use rayon::prelude::*; | ||||
| use rayon::ThreadPoolBuilder; | ||||
| use rstar::{PointDistance, RStarInsertionStrategy, RTree, RTreeObject, RTreeParams, AABB}; | ||||
| use rustc_hash::{FxHashMap, FxHashSet}; | ||||
| use serde::{Deserialize, Serialize}; | ||||
|  | @ -27,10 +30,11 @@ use std::fs::File; | |||
| use std::hash::{Hash, Hasher}; | ||||
| use std::io::{BufReader, BufWriter, Write}; | ||||
| use std::path::PathBuf; | ||||
| use std::sync::atomic::{AtomicUsize, Ordering}; | ||||
| use std::sync::{Arc, Mutex}; | ||||
| use std::thread; | ||||
| use std::thread::JoinHandle; | ||||
| use std::time::Instant; | ||||
| use std::time::{Duration, Instant}; | ||||
| use std::{ | ||||
|     collections::{BinaryHeap, VecDeque}, | ||||
|     path::Path, | ||||
|  | @ -317,8 +321,8 @@ impl TryFrom<PyModeConfig> for ModeConfig { | |||
| #[allow(non_camel_case_types)] | ||||
| pub enum PrecomputeMode { | ||||
|     Full, | ||||
|     Route_From, | ||||
|     Route_To, | ||||
|     Route_From(u32), | ||||
|     Route_To(u32), | ||||
|     None, | ||||
| } | ||||
| 
 | ||||
|  | @ -817,6 +821,74 @@ impl Router { | |||
|         return self.scoopable.contains(&id); | ||||
|     } | ||||
| 
 | ||||
|     pub fn bfs_loop_test(&self, range: f32, source: &TreeNode, goal: &TreeNode, n: usize) -> (bool, usize, usize) { | ||||
|         // info!("Starting thread pool");
 | ||||
|         // ThreadPoolBuilder::new()
 | ||||
|         //     .num_threads(8)
 | ||||
|         //     .build_global()
 | ||||
|         //     .unwrap();
 | ||||
|         let t_start = Instant::now(); | ||||
|         let route_dist = dist(&source.pos, &goal.pos); | ||||
|         let seen: Arc<DashMap<u32, u32>> = Arc::new(DashMap::new()); | ||||
|         let mut depth = 0; | ||||
|         let mut queue = vec![*source]; | ||||
|         let mut queue_next = vec![]; | ||||
|         let tree = self.tree.clone(); | ||||
|         let r2 = range * range; | ||||
|         let mut found = false; | ||||
|         while !queue.is_empty() { | ||||
|             depth += 1; | ||||
|             let seen = seen.clone(); | ||||
|             queue_next.extend(queue.drain(..).flat_map(|sys| { | ||||
|                 let seen = seen.clone(); | ||||
|                 tree.locate_within_distance(sys.pos, r2) | ||||
|                     .filter_map(move |nb| seen.insert(nb.id, sys.id).is_none().then_some(*nb)) | ||||
|             })); | ||||
|             if seen.contains_key(&goal.id) { | ||||
|                 found = true; | ||||
|                 break; | ||||
|             } | ||||
|             std::mem::swap(&mut queue_next, &mut queue); | ||||
|             if n != 0 { | ||||
|                 queue.sort_by_cached_key(|v| F32(heuristic(range, v, goal))); | ||||
|                 queue.truncate(n); | ||||
|             } | ||||
|             // info!("[{}|{}] {}", goal.id, depth, queue.len());
 | ||||
|         } | ||||
|         let seen = Arc::try_unwrap(seen) | ||||
|             .unwrap() | ||||
|             .into_iter() | ||||
|             .collect::<FxHashMap<u32, u32>>(); | ||||
|         info!( | ||||
|             "[{}|{}->{} ({:.02} Ly)|{}] Depth: {} Seen: {} ({:.02}%) Took: {}", | ||||
|             n, | ||||
|             source.id, | ||||
|             goal.id, | ||||
|             route_dist, | ||||
|             found, | ||||
|             depth, | ||||
|             seen.len(), | ||||
|             ((seen.len() as f64) / (tree.size() as f64)) * 100.0, | ||||
|             humantime::format_duration(t_start.elapsed()) | ||||
|         ); | ||||
|         return (found, depth, seen.len()); | ||||
|         // let path = self.reconstruct(goal.id, &seen); // unreachable after the early return above | ||||
|     } | ||||
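| 
|     // Editor's sketch (assumed helper, not in the commit): `bfs_loop_test` expands the
|     // jump graph one layer at a time; when `n > 0` each layer is pruned to the `n`
|     // entries with the smallest heuristic value, turning the plain BFS into a beam
|     // search, while `n == 0` keeps every reachable neighbour. A thin wrapper that only
|     // reports whether the goal is reachable within the beam might look like this:
|     #[allow(dead_code)]
|     pub fn is_reachable_beam(&self, range: f32, src: &TreeNode, dst: &TreeNode, beam: usize) -> bool {
|         let (found, _depth, _seen) = self.bfs_loop_test(range, src, dst, beam);
|         found
|     }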
| 
 | ||||
|     fn reconstruct(&self, goal_id: u32, map: &FxHashMap<u32, u32>) -> Result<Vec<System>, String> { | ||||
|         let mut path = vec![]; | ||||
|         let mut current = goal_id; | ||||
|         while let Some(next) = map.get(&current) { | ||||
|             path.push( | ||||
|                 self.get(*next)? | ||||
|                     .ok_or(format!("System ID {} not found", next))?, | ||||
|             ); | ||||
|             current = *next; | ||||
|         } | ||||
|         path.reverse(); | ||||
|         Ok(path) | ||||
|     } | ||||
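| 
|     // Editor's sketch: `reconstruct` above walks the predecessor map back from the
|     // goal and reverses the collected systems. The same idea over raw ids, without
|     // the system lookups (helper name is hypothetical, not part of the commit):
|     #[allow(dead_code)]
|     fn reconstruct_ids(goal_id: u32, prev: &FxHashMap<u32, u32>) -> Vec<u32> {
|         let mut path = vec![goal_id];
|         let mut current = goal_id;
|         while let Some(&parent) = prev.get(&current) {
|             if parent == current {
|                 break; // guard against a self-loop in the map
|             }
|             path.push(parent);
|             current = parent;
|         }
|         path.reverse();
|         path
|     }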
| 
 | ||||
|     fn best_multiroute( | ||||
|         &mut self, | ||||
|         waypoints: &[System], | ||||
|  | @ -1468,7 +1540,9 @@ impl Router { | |||
|                 let mut refuels = state.refuels; | ||||
|                 let dist = dist(&nb.pos, &state.node.pos); | ||||
|                 let (fuel_cost, new_fuel) = { | ||||
|                     if let Some(res) = ship.fuel_cost_for_jump(state.fuel, dist, state.node.get_mult()) { | ||||
|                     if let Some(res) = | ||||
|                         ship.fuel_cost_for_jump(state.fuel, dist, state.node.get_mult()) | ||||
|                     { | ||||
|                         // can jump with current amount of fuel
 | ||||
|                         res | ||||
|                     } else if let Some(res) = | ||||
|  | @ -1544,7 +1618,22 @@ impl Router { | |||
|             }) | ||||
|     } | ||||
| 
 | ||||
|     pub fn floyd_warshall(&self, _range: f32) { | ||||
|     pub fn floyd_warshall(&self, range: f32) -> Result<Vec<System>, String> { | ||||
|         let mut dist: FxHashMap<u64, usize> = FxHashMap::default(); | ||||
|         info!("Building neighbour distance map..."); | ||||
|         let total = self.tree.size(); | ||||
|         for (n, node) in self.tree.iter().enumerate() { | ||||
|             if (n % 100_000) == 0 { | ||||
|                 println!("{}/{}", n, total); | ||||
|             } | ||||
|             let key = (node.id as u64) << 32; | ||||
|             for nb in self.neighbours(node, range) { | ||||
|                 let key = key | nb.id as u64; | ||||
|                 dist.entry(key).or_insert(1); | ||||
|             } | ||||
|             let key = ((node.id as u64) << 32) | node.id as u64; | ||||
|             dist.insert(key, 0); | ||||
|         } | ||||
|         todo!() | ||||
|     } | ||||
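| 
|     // Editor's sketch: the u64 keys built in `floyd_warshall` pack two u32 system ids
|     // into a single map key (source in the high 32 bits, neighbour in the low 32 bits).
|     // Helper names are assumptions for illustration only:
|     #[allow(dead_code)]
|     fn pack_pair(a: u32, b: u32) -> u64 {
|         ((a as u64) << 32) | (b as u64)
|     }
|     #[allow(dead_code)]
|     fn unpack_pair(key: u64) -> (u32, u32) {
|         ((key >> 32) as u32, (key & 0xFFFF_FFFF) as u32)
|     }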
| 
 | ||||
|  | @ -1553,86 +1642,81 @@ impl Router { | |||
|         h = (dist(node,goal)-(range*node.mult)).max(0.0) // remaining distance after jumping from here
 | ||||
|         */ | ||||
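|         // Editor's sketch (assumption, derived from the comment above; the real
|         // `heuristic` is defined elsewhere in this crate): estimated distance still to
|         // cover after the best possible jump from `node`, clamped at zero so the
|         // estimate never overshoots the true remaining distance.
|         #[allow(dead_code)]
|         fn heuristic_sketch(range: f32, node: &TreeNode, goal: &TreeNode) -> f32 {
|             (dist(&node.pos, &goal.pos) - range * node.get_mult()).max(0.0)
|         }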
|         let src = self.tree.nearest_neighbor(&[0.0, 0.0, 0.0]).unwrap(); | ||||
|         // let mut route_log = BufWriter::new(File::create("route_log_ib.txt").map_err(|e| e.to_string())?);
 | ||||
|         let goal = self | ||||
|             .tree | ||||
|             // .nearest_neighbor(&[-1111.5625, -134.21875, 65269.75]) // Beagle Point
 | ||||
|             .nearest_neighbor(&[-9530.5, -910.28125, 19808.125]) // Colonia
 | ||||
|             .nearest_neighbor(&[-1111.5625, -134.21875, 65269.75]) // Beagle Point
 | ||||
|             // .nearest_neighbor(&[-9530.5, -910.28125, 19808.125]) // Colonia
 | ||||
|             .unwrap(); | ||||
|         let mut best_node = FxHashMap::default(); | ||||
|         let mut prev = FxHashMap::default(); | ||||
|         let mut wait_list: FxHashMap<usize, MinFHeap<TreeNode>> = FxHashMap::default(); | ||||
|         let mut in_wait_list: FxHashSet<u32> = FxHashSet::default(); | ||||
|         // let mut prev = FxHashMap::default();
 | ||||
|         let mut queue = MinFHeap::new(); | ||||
|         let t_start = Instant::now(); | ||||
|         let mut n = 0usize; | ||||
|         let mut skipped = 0usize; | ||||
|         let mut global_best = u32::MAX; | ||||
|         queue.push(heuristic(range, src, goal), (0, src)); | ||||
|         loop { | ||||
|             let t_start = Instant::now(); | ||||
|             let mut n = 0usize; | ||||
|             let mut skipped = 0usize; | ||||
|             let mut depth = 0usize; | ||||
|             let mut queue = VecDeque::new(); | ||||
|             queue.push_back(*src); | ||||
|             'outer: loop { | ||||
|                 // println!("D: {} | Q: {}", depth, queue.len());
 | ||||
|                 let mut queue_next = VecDeque::new(); | ||||
|                 if queue.is_empty() { | ||||
|                     warn!( | ||||
|                         "Depth: {} | Visited: {} | Skipped: {} | search space exhausted after {}", | ||||
|                         depth, | ||||
|                         n, | ||||
|                         skipped, | ||||
|                         humantime::format_duration(t_start.elapsed()) | ||||
|                     ); | ||||
|                     break; | ||||
|             println!("Q: {}", queue.len()); | ||||
|             if queue.is_empty() { | ||||
|                 warn!( | ||||
|                     "Visited: {} | Skipped: {} | search space exhausted after {}", | ||||
|                     n, | ||||
|                     skipped, | ||||
|                     humantime::format_duration(t_start.elapsed()) | ||||
|                 ); | ||||
|                 break; | ||||
|             } | ||||
|             while let Some((_, (depth, node))) = queue.pop() { | ||||
|                 let best_len = best_node.len(); | ||||
|                 let best_depth = best_node.entry(node.id).or_insert(depth); | ||||
|                 if *best_depth > global_best { | ||||
|                     skipped += 1; | ||||
|                     continue; | ||||
|                 } | ||||
|                 while let Some(node) = queue.pop_front() { | ||||
|                     let best_len = best_node.len(); | ||||
|                     let best_depth = best_node.entry(node.id).or_insert(depth); | ||||
|                     if depth > *best_depth { | ||||
|                         skipped += 1; | ||||
|                         continue; | ||||
|                     } | ||||
|                     if depth < *best_depth { | ||||
|                         *best_depth = depth; | ||||
|                     } | ||||
|                     n += 1; | ||||
|                     if node.id == goal.id { | ||||
|                 // writeln!(route_log,"{}, {}",node.id,depth).map_err(|e| e.to_string())?;
 | ||||
|                 // route_log.flush().map_err(|e| e.to_string())?;
 | ||||
|                 if depth < *best_depth { | ||||
|                     *best_depth = depth; | ||||
|                 } | ||||
|                 n += 1; | ||||
|                 if node.id == goal.id { | ||||
|                     if depth < global_best { | ||||
|                         global_best = global_best.min(depth); | ||||
|                         queue.retain(|(_, (d, _))| *d <= global_best); | ||||
|                         info!( | ||||
|                             "Depth: {}, Skipped: {}, Seen: {} (Total: {}) | Best: {} | elapsed: {}", | ||||
|                             depth, | ||||
|                             "Queued: {}, Skipped: {}, Seen: {} (Total: {}) | Best: {} | elapsed: {}", | ||||
|                             queue.len(), | ||||
|                             skipped, | ||||
|                             n, | ||||
|                             best_len, | ||||
|                             best_depth, | ||||
|                             global_best, | ||||
|                             humantime::format_duration(t_start.elapsed()).to_string() | ||||
|                         ); | ||||
|                         for layer_n in wait_list.keys().sorted() { | ||||
|                             println!("WL({}): {}", layer_n, wait_list[layer_n].len()); | ||||
|                         } | ||||
|                         todo!(); | ||||
|                         break 'outer; | ||||
|                     } | ||||
|                     let valid_nbs = self | ||||
|                         .neighbours(&node, node.get_mult() * range) | ||||
|                         .filter(|nb| (self.valid(nb.id) || (nb.id == goal.id))) | ||||
|                         .filter(|nb| match best_node.get(&nb.id) { | ||||
|                             Some(&d) => (depth + 1) <= d, | ||||
|                             None => true, | ||||
|                         }) | ||||
|                         .map(|nb| { | ||||
|                             prev.insert(nb.id, node); | ||||
|                             (F32(heuristic(range, nb, goal)), *nb) | ||||
|                         }); | ||||
|                     queue_next.extend(valid_nbs); | ||||
|                     continue; | ||||
|                 } else if n % 10000 == 0 { | ||||
|                     info!( | ||||
|                         "Queued: {}, Skipped: {}, Seen: {} (Total: {}) | Best: {} | elapsed: {}", | ||||
|                         queue.len(), | ||||
|                         skipped, | ||||
|                         n, | ||||
|                         best_len, | ||||
|                         global_best, | ||||
|                         humantime::format_duration(t_start.elapsed()).to_string() | ||||
|                     ); | ||||
|                 } | ||||
|                 queue_next.make_contiguous().sort(); | ||||
|                 if let Some((_, nb)) = queue_next.pop_front() { | ||||
|                     queue.push_back(nb); | ||||
|                 } | ||||
|                 let layer = wait_list.entry(depth).or_default(); | ||||
|                 while let Some((F32(v), nb)) = queue_next.pop_front() { | ||||
|                     if in_wait_list.insert(nb.id) { | ||||
|                         layer.push(v, nb); | ||||
|                     }; | ||||
|                 } | ||||
|                 depth += 1; | ||||
|                 self.neighbours(node, node.get_mult() * range) | ||||
|                     .filter(|nb| (self.valid(nb.id) || (nb.id == goal.id))) | ||||
|                     .filter(|nb| match best_node.get(&nb.id) { | ||||
|                         Some(&d) => depth < d, | ||||
|                         None => true, | ||||
|                     }) | ||||
|                     .map(|nb| (heuristic(range, nb, goal), nb)) | ||||
|                     .for_each(|(h, nb)| { | ||||
|                         // prev.insert(nb.id, node.id);
 | ||||
|                         queue.push(h, (depth + 1, nb)); | ||||
|                     }); | ||||
|             } | ||||
|         } | ||||
|         todo!() | ||||
|  | @ -1705,7 +1789,7 @@ impl Router { | |||
|                 let tx = tx_r.clone(); | ||||
|                 let rx = rx_q.clone(); | ||||
|                 thread::spawn(move || { | ||||
|                     while let Ok(nodes) = rx.recv() { | ||||
|                     rx.into_iter().for_each(|nodes| { | ||||
|                         let mut ret = vec![]; | ||||
|                         for node in nodes { | ||||
|                             let res: Vec<TreeNode> = | ||||
|  | @ -1713,9 +1797,8 @@ impl Router { | |||
|                             ret.push((node, res)); | ||||
|                         } | ||||
|                         tx.send(ret).unwrap(); | ||||
|                     } | ||||
|                     }); | ||||
|                     drop(tx); | ||||
|                     drop(rx); | ||||
|                 }) | ||||
|             }) | ||||
|             .collect(); | ||||
|  | @ -1784,49 +1867,31 @@ impl Router { | |||
| 
 | ||||
|     #[cfg_attr(feature = "profiling", tracing::instrument)] | ||||
|     pub fn precompute_all(&mut self, range: f32) -> Result<(), String> { | ||||
|         use flate2::write::GzEncoder; | ||||
|         let fh_nb = File::create(format!(r#"O:\nb_{}.dat"#, range)).unwrap(); | ||||
|         let mut buf_writer = BufWriter::new(fh_nb); | ||||
|         let mut fh_encoder = BufWriter::new(fh_nb); | ||||
|         let mut pos: u64 = 0; | ||||
|         let mut n = 0; | ||||
|         let total = self.tree.size(); | ||||
|         let (tx, rx, threads) = self.neighbor_workers(num_cpus::get(), range); | ||||
|         let mut n: usize = 0; | ||||
|         // let (tx, rx, threads) = self.neighbor_workers(num_cpus::get(), range);
 | ||||
|         let mut map: FxHashMap<u32, u64> = FxHashMap::default(); | ||||
|         info!("Precomputing neighbor map"); | ||||
|         info!("Submitting jobs"); | ||||
|         self.tree | ||||
|             .iter() | ||||
|             .chunks(10_000) | ||||
|             .into_iter() | ||||
|             .for_each(|chunk| { | ||||
|                 tx.send(chunk.cloned().collect()).unwrap(); | ||||
|             }); | ||||
|         drop(tx); | ||||
|         info!("Processing..."); | ||||
|         rx.into_iter() | ||||
|             .flatten() | ||||
|             .enumerate() | ||||
|             .for_each(|(n, (node, mut neighbors))| { | ||||
|                 let neighbors: Vec<u32> = neighbors.drain(..).map(|n| n.id).collect(); | ||||
|                 // map.insert(node.id, pos);
 | ||||
|                 pos += buf_writer | ||||
|                     .write(&bincode::serialize(&neighbors).unwrap()) | ||||
|                     .unwrap() as u64; | ||||
|                 if (n % 100000) == 0 { | ||||
|                     let prc = ((n as f64) / (total as f64)) * 100f64; | ||||
|                     info!("{}/{} ({:.2}%) done, {} bytes", n, total, prc, pos); | ||||
|                 } | ||||
|             }); | ||||
|         info!("Precomputing neighbor map..."); | ||||
|         self.tree.iter().for_each(|node| { | ||||
|             let nb = self.neighbours(node, range).map(|nb| nb.id).collect_vec(); | ||||
|             map.insert(node.id, pos); | ||||
|             pos += fh_encoder.write(&bincode::serialize(&nb).unwrap()).unwrap() as u64; | ||||
|             if (n % 10000) == 0 { | ||||
|                 let prc = ((n as f64) / (total as f64)) * 100f64; | ||||
|                 info!("{}/{} ({:.2}%) done, {} bytes", n, total, prc, pos); | ||||
|             } | ||||
|             n += 1; | ||||
|         }); | ||||
|         let mut fh_idx = BufWriter::new(File::create(format!(r#"O:\nb_{}.idx"#, range)).unwrap()); | ||||
|         info!("Writing index map"); | ||||
|         info!( | ||||
|             "Wrote {} bytes", | ||||
|             fh_idx.write(&bincode::serialize(&map).unwrap()).unwrap() | ||||
|         ); | ||||
|         info!("Joining threads"); | ||||
|         for t in threads { | ||||
|             t.join().unwrap(); | ||||
|         } | ||||
|         info!("Done!"); | ||||
|         Ok(()) | ||||
|     } | ||||
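| 
|     // Editor's sketch (assumed file layout, mirroring the writer above; not part of
|     // this commit): the `.idx` file holds a map from system id to the byte offset in
|     // `nb_<range>.dat` at which that system's bincode-encoded Vec<u32> of neighbour
|     // ids starts. Reading a single entry back might look like this:
|     #[allow(dead_code)]
|     fn load_neighbours(id: u32, range: f32) -> Result<Vec<u32>, String> {
|         use std::io::{Seek, SeekFrom};
|         let idx_file = File::open(format!(r#"O:\nb_{}.idx"#, range)).map_err(|e| e.to_string())?;
|         let index: FxHashMap<u32, u64> =
|             bincode::deserialize_from(BufReader::new(idx_file)).map_err(|e| e.to_string())?;
|         let offset = *index.get(&id).ok_or("unknown system id")?;
|         let mut data_file = File::open(format!(r#"O:\nb_{}.dat"#, range)).map_err(|e| e.to_string())?;
|         data_file.seek(SeekFrom::Start(offset)).map_err(|e| e.to_string())?;
|         bincode::deserialize_from(BufReader::new(data_file)).map_err(|e| e.to_string())
|     }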
| 
 | ||||
|  | @ -2476,13 +2541,15 @@ impl Router { | |||
|                 let next_depth = depth + 1; | ||||
|                 match node { | ||||
|                     BiDirNode::Forward(node) => { | ||||
|                         let nbs = self.neighbours(&node, node.get_mult() * range).filter_map(|nb| { | ||||
|                             if !seen_fwd.insert(nb.id) { | ||||
|                                 return None; | ||||
|                             } | ||||
|                             prev.insert(nb.id, node.id); | ||||
|                             Some((next_depth, BiDirNode::Forward(*nb))) | ||||
|                         }); | ||||
|                         let nbs = | ||||
|                             self.neighbours(&node, node.get_mult() * range) | ||||
|                                 .filter_map(|nb| { | ||||
|                                     if !seen_fwd.insert(nb.id) { | ||||
|                                         return None; | ||||
|                                     } | ||||
|                                     prev.insert(nb.id, node.id); | ||||
|                                     Some((next_depth, BiDirNode::Forward(*nb))) | ||||
|                                 }); | ||||
|                         queue.extend(nbs); | ||||
|                     } | ||||
|                     BiDirNode::Backwards(node) => { | ||||
|  |  | |||
|  | @ -229,7 +229,7 @@ impl Ship { | |||
| } | ||||
| 
 | ||||
| impl FSD { | ||||
|     pub fn to_object(&self, py: Python) -> PyResult<PyObject> { | ||||
|     pub fn to_object(&self, py: Python<'_>) -> PyResult<PyObject> { | ||||
|         let elem = PyDict::new(py); | ||||
|         elem.set_item("rating_val", self.rating_val)?; | ||||
|         elem.set_item("class_val", self.class_val)?; | ||||
|  | @ -242,7 +242,7 @@ impl FSD { | |||
| } | ||||
| 
 | ||||
| impl Ship { | ||||
|     pub fn to_object(&self, py: Python) -> PyResult<PyObject> { | ||||
|     pub fn to_object(&self, py: Python<'_>) -> PyResult<PyObject> { | ||||
|         let elem = PyDict::new(py); | ||||
|         elem.set_item("base_mass", self.base_mass)?; | ||||
|         elem.set_item("fuel_mass", self.fuel_mass)?; | ||||
|  |  | |||
							
								
								
									
6 setup.py
							|  | @ -1,5 +1,5 @@ | |||
| # -*- coding: utf-8 -*- | ||||
| from setuptools import find_packages, setup | ||||
| from setuptools import find_packages, find_namespace_packages, setup | ||||
| from setuptools_rust import Binding, RustExtension, Strip | ||||
| import os | ||||
| 
 | ||||
|  | @ -68,7 +68,7 @@ setup( | |||
|     description="Elite: Dangerous long range route plotter", | ||||
|     long_description=long_description, | ||||
|     long_description_content_type="text/markdown", | ||||
|     url="https://gitlab.com/Earthnuker/ed_lrr/-/tree/pyqt_gui", | ||||
|     url="https://gitdab.com/Earthnuker/ED_LRR/src/branch/pyqt_gui", | ||||
|     rust_extensions=[ | ||||
|         RustExtension( | ||||
|             "_ed_lrr", | ||||
|  | @ -82,7 +82,7 @@ setup( | |||
|             quiet=True, | ||||
|         ) | ||||
|     ], | ||||
|     packages=find_packages(), | ||||
|     packages=find_namespace_packages(), | ||||
|     entry_points={ | ||||
|         "console_scripts": ["ed_lrr = ed_lrr_gui.__main__:main"], | ||||
|         "gui_scripts": ["ed_lrr_gui = ed_lrr_gui.__main__:gui_main"], | ||||
|  |  | |||