2022-02-23

Daniel S. 2022-02-23 22:45:59 +01:00
parent 35a0c40d14
commit dc68cce9ed
80 changed files with 859345 additions and 4387 deletions

19
rust/.vscode/settings.json vendored Normal file

@ -0,0 +1,19 @@
{
"spellright.language": [
"de",
"en"
],
"spellright.documentTypes": [
"latex",
"plaintext",
"git-commit"
],
"discord.enabled": true,
"python.pythonPath": "..\\.nox\\devenv-3-8\\python.exe",
"jupyter.jupyterServerType": "remote",
"files.associations": {
"*.ksy": "yaml",
"*.vpy": "python",
"stat.h": "c"
}
}

1660
rust/Cargo.lock generated

File diff suppressed because it is too large

rust/Cargo.toml

@ -1,42 +1,79 @@
[package]
name = "ed_lrr"
version = "0.2.0"
authors = [ "Daniel Seiller <earthnuker@gmail.com>",]
edition = "2018"
repository = "https://gitlab.com/Earthnuker/ed_lrr.git"
license = "MIT"
[lib]
crate-type = [ "cdylib",]
name = "_ed_lrr"
[dependencies]
csv = "1.1.3"
humantime = "2.0.1"
permutohedron = "0.2.4"
serde_json = "1.0.55"
fnv = "1.0.7"
bincode = "1.2.1"
sha3 = "0.9.0"
byteorder = "1.3.4"
strsim = "0.10.0"
rstar = "0.8.0"
crossbeam-channel = "0.4.2"
better-panic = "0.2.0"
derivative = "2.1.1"
dict_derive = "0.2.0"
num_cpus = "1.13.0"
regex = "1.3.9"
chrono = "0.4.11"
[dependencies.pyo3]
git = "https://github.com/PyO3/pyo3"
features = [ "extension-module",]
[dependencies.serde]
version = "1.0.112"
features = [ "derive",]
[profile.release]
codegen-units = 1
lto = true
[package]
name = "ed_lrr"
version = "0.2.0"
authors = ["Daniel Seiller <earthnuker@gmail.com>"]
edition = "2018"
repository = "https://gitlab.com/Earthnuker/ed_lrr.git"
license = "MIT"
[lib]
crate-type = ["cdylib"]
name = "_ed_lrr"
[profile.release]
codegen-units = 1
opt-level = 3
debug = true
lto = "fat"
[dependencies]
pyo3 = { version = "0.15.1", features = ["extension-module","eyre"] }
csv = "1.1.6"
humantime = "2.1.0"
permutohedron = "0.2.4"
serde_json = "1.0.74"
fnv = "1.0.7"
bincode = "1.3.3"
sha3 = "0.10.0"
byteorder = "1.4.3"
rstar = "0.9.2"
crossbeam-channel = "0.5.2"
better-panic = "0.3.0"
derivative = "2.2.0"
dict_derive = "0.4.0"
regex = "1.5.4"
num_cpus = "1.13.1"
eddie = "0.4.2"
thiserror = "1.0.30"
pyo3-log = "0.5.0"
log = "0.4.14"
flate2 = "1.0.22"
eval = "0.4.3"
pythonize = "0.15.0"
itertools = "0.10.3"
intmap = "0.7.1"
diff-struct = "0.4.1"
rustc-hash = "1.1.0"
stats_alloc = "0.1.8"
tracing = { version = "0.1.29", optional = true }
tracing-subscriber = { version = "0.3.5", optional = true }
tracing-tracy = { version = "0.8.0", optional = true }
tracing-unwrap = { version = "0.9.2", optional = true }
tracy-client = { version = "0.12.6", optional = true }
tracing-chrome = "0.4.0"
rand = "0.8.4"
eyre = "0.6.6"
memmap = "0.7.0"
csv-core = "0.1.10"
postcard = { version = "0.7.3", features = ["alloc"] }
nohash-hasher = "0.2.0"
[features]
profiling = ["tracing","tracing-subscriber","tracing-tracy","tracing-unwrap","tracy-client"]
[dev-dependencies]
criterion = { version = "0.3.5", features = ["real_blackbox"] }
rand = "0.8.4"
rand_distr = "0.4.2"
[dependencies.serde]
version = "1.0.133"
features = ["derive"]
[[bench]]
name = "dot_bench"
harness = false

29
rust/analyze_logs.py Normal file

@ -0,0 +1,29 @@
import ujson
from glob import glob
import pandas as pd
from datetime import timedelta
route_info = {}
for log in glob("../logs/route_log*.json"):
name = log.split("route_log_")[1].rsplit(".", 1)[0]
data = ujson.load(open(log))
dt = data["dt"]
route_len = len(data["route"])
if route_len:
route_info[name] = (dt, route_len)
dt, route_len = route_info["beam_0"] # BFS as baseline
data = []
for name, (dt_o, l_o) in sorted(route_info.items(), key=lambda v: v[1][0] / v[1][1]):
dt_s = str(timedelta(seconds=round(dt_o, 2))).rstrip("0")
data.append(
{
"name": name,
"time": "{} ({:.2f}x)".format(dt_s, dt / dt_o),
"length": "{} (+{:.2%})".format(l_o, (l_o / route_len) - 1),
"time/hop": "{:.2} s".format(dt_o / l_o),
}
)
df = pd.DataFrame(data)
print(df.to_markdown(index=False))

142
rust/benches/dot_bench.rs Normal file

@ -0,0 +1,142 @@
use criterion::{criterion_group, criterion_main, BatchSize, Criterion};
use rand::Rng;
use rand_distr::StandardNormal;
fn rand_v3() -> [f32; 3] {
let mut rng = rand::thread_rng();
[
rng.sample(StandardNormal),
rng.sample(StandardNormal),
rng.sample(StandardNormal),
]
}
fn arand() -> f32 {
let mut rng = rand::thread_rng();
rng.sample::<f32, _>(StandardNormal).abs()
}
#[inline(always)]
fn veclen(v: &[f32; 3]) -> f32 {
(v[0] * v[0] + v[1] * v[1] + v[2] * v[2]).sqrt()
}
#[inline(always)]
fn dist2(p1: &[f32; 3], p2: &[f32; 3]) -> f32 {
let dx = p1[0] - p2[0];
let dy = p1[1] - p2[1];
let dz = p1[2] - p2[2];
dx * dx + dy * dy + dz * dz
}
#[inline(always)]
fn dist(p1: &[f32; 3], p2: &[f32; 3]) -> f32 {
dist2(p1, p2).sqrt()
}
/// Dot product (cosine of angle) between two 3D vectors
#[inline(always)]
pub fn ndot_vec_dist(u: &[f32; 3], v: &[f32; 3]) -> f32 {
let z: [f32; 3] = [0.0; 3];
let lm = dist(u, &z) * dist(v, &z);
((u[0] * v[0]) + (u[1] * v[1]) + (u[2] * v[2])) / lm
}
/// Dot product (cosine of angle) between two 3D vectors
#[inline(always)]
pub fn ndot_vec_len(u: &[f32; 3], v: &[f32; 3]) -> f32 {
let lm = veclen(u) * veclen(v);
((u[0] * v[0]) + (u[1] * v[1]) + (u[2] * v[2])) / lm
}
#[inline(always)]
pub fn ndot_iter(u: &[f32; 3], v: &[f32; 3]) -> f32 {
let mut l_u = 0.0;
let mut l_v = 0.0;
let mut l_s = 0.0;
for (u, v) in u.iter().zip(v.iter()) {
l_s += u * v;
l_u += u * u;
l_v += v * v;
}
l_s / (l_u * l_v).sqrt()
}
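// Quick sanity check for the three dot-product variants above (illustrative,
// assumed values): orthogonal vectors give ~0, identical vectors ~1, and all
// variants agree on the same inputs.
#[allow(dead_code)]
fn ndot_sanity_check() {
    let x = [1.0f32, 0.0, 0.0];
    let y = [0.0f32, 1.0, 0.0];
    assert!(ndot_iter(&x, &y).abs() < 1e-6);
    assert!((ndot_iter(&x, &x) - 1.0).abs() < 1e-6);
    assert!((ndot_vec_dist(&x, &y) - ndot_vec_len(&x, &y)).abs() < 1e-6);
}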
fn bench_ndot(c: &mut Criterion) {
let mut g = c.benchmark_group("ndot");
g.bench_function("vec_dist", |b| {
b.iter_batched(
|| (rand_v3(), rand_v3()),
|(v1, v2)| ndot_vec_dist(&v1, &v2),
BatchSize::SmallInput,
);
});
g.bench_function("vec_len", |b| {
b.iter_batched(
|| (rand_v3(), rand_v3()),
|(v1, v2)| ndot_vec_len(&v1, &v2),
BatchSize::SmallInput,
);
});
g.bench_function("iter", |b| {
b.iter_batched(
|| (rand_v3(), rand_v3()),
|(v1, v2)| ndot_iter(&v1, &v2),
BatchSize::SmallInput,
);
});
g.finish();
}
fn bench_dist(c: &mut Criterion) {
let mut g = c.benchmark_group("dist");
g.bench_function("dist", |b| {
b.iter_batched(
|| (rand_v3(), rand_v3()),
|(v1, v2)| dist(&v1, &v2),
BatchSize::SmallInput,
);
});
g.bench_function("dist2", |b| {
b.iter_batched(
|| (rand_v3(), rand_v3()),
|(v1, v2)| dist2(&v1, &v2),
BatchSize::SmallInput,
);
});
g.finish();
}
fn vsub(a: &[f32; 3], b: &[f32; 3]) -> [f32; 3] {
[a[0] - b[0], a[1] - b[1], a[2] - b[2]]
}
pub fn h_old(node: &[f32; 3], m: f32, goal: &[f32; 3], r: f32) -> f32 {
(dist(node, goal) - (r * m)).max(0.0)
}
pub fn h_new(node: &[f32; 3], next: &[f32; 3], goal: &[f32; 3]) -> f32 {
-ndot_iter(&vsub(node, goal), &vsub(node, next)).acos()
}
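// Geometric note on the two heuristics being benchmarked (illustrative): h_old is
// the straight-line distance to the goal minus one (multiplier-boosted) jump,
// clamped to zero; h_new is the negated angle at `node` between the directions
// towards `goal` and towards `next`, so a hop aimed straight at the goal scores
// -acos(1) = 0 and one aimed directly away scores -acos(-1) = -PI.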
fn bench_new_heur(c: &mut Criterion) {
c.bench_function("old_heuristic", |b| {
b.iter_batched(
|| (rand_v3(), arand(), rand_v3(), arand()),
|(node, m, goal, range)| h_old(&node, m, &goal, range),
BatchSize::SmallInput,
);
});
c.bench_function("new_heuristic", |b| {
b.iter_batched(
|| (rand_v3(), rand_v3(), rand_v3()),
|(v1, v2, v3)| h_new(&v1, &v2, &v3),
BatchSize::SmallInput,
);
});
}
criterion_group!(benches, bench_ndot, bench_dist, bench_new_heur);
criterion_main!(benches);

File diff suppressed because it is too large

0
rust/deps.svg Normal file

46
rust/multi_test.py Normal file

@ -0,0 +1,46 @@
import os
def setup_logging(loglevel="INFO"):
import logging
import coloredlogs
import datetime
coloredlogs.DEFAULT_FIELD_STYLES["delta"] = {"color": "green"}
coloredlogs.DEFAULT_FIELD_STYLES["levelname"] = {"color": "yellow"}
class DeltaTimeFormatter(coloredlogs.ColoredFormatter):
def format(self, record):
seconds = record.relativeCreated / 1000
duration = datetime.timedelta(seconds=seconds)
record.delta = str(duration)
return super().format(record)
coloredlogs.ColoredFormatter = DeltaTimeFormatter
logfmt = " | ".join(
["[%(delta)s] %(levelname)s", "%(name)s:%(pathname)s:%(lineno)s", "%(message)s"]
)
numeric_level = getattr(logging, loglevel.upper(), None)
if not isinstance(numeric_level, int):
raise ValueError("Invalid log level: %s" % loglevel)
coloredlogs.install(level=numeric_level, fmt=logfmt)
setup_logging()
_ed_lrr = __import__("_ed_lrr")
r = _ed_lrr.PyRouter(None)
r.load("stars.csv")
# r.run_bfs(48)
r.test(48)
exit()
_ed_lrr.PyRouter.preprocess_galaxy("E:/EDSM/galaxy.json.gz", "E:/EDSM/stars.csv")
exit()
r = _ed_lrr.PyRouter(print)
r.load("../stars.csv")
systems = r.resolve_systems((0, 0, 0), "Colonia", 18627)
print(systems)
print(systems[0, 0, 0])

0
rust/route_log.txt Normal file

221
rust/run_test.py Normal file

@ -0,0 +1,221 @@
import subprocess as SP
import sys
from datetime import datetime, timedelta
import os
import shutil
import json
def setup_logging(loglevel="INFO"):
import logging
import coloredlogs
coloredlogs.DEFAULT_FIELD_STYLES["delta"] = {"color": "green"}
coloredlogs.DEFAULT_FIELD_STYLES["levelname"] = {"color": "yellow"}
class DeltaTimeFormatter(coloredlogs.ColoredFormatter):
def format(self, record):
seconds = record.relativeCreated / 1000
duration = timedelta(seconds=seconds)
record.delta = str(duration)
return super().format(record)
coloredlogs.ColoredFormatter = DeltaTimeFormatter
logfmt = " | ".join(
["[%(delta)s] %(levelname)s", "%(name)s:%(pathname)s:%(lineno)s", "%(message)s"]
)
numeric_level = getattr(logging, loglevel.upper(), None)
if not isinstance(numeric_level, int):
raise ValueError("Invalid log level: %s" % loglevel)
coloredlogs.install(level=numeric_level, fmt=logfmt)
setup_logging()
JUMP_RANGE = 48
globals().setdefault("__file__", r"D:\devel\rust\ed_lrr_gui\rust\run_test.py")
dirname = os.path.dirname(__file__) or "."
os.chdir(dirname)
t_start = datetime.now()
os.environ["PYO3_PYTHON"] = sys.executable
if "--clean" in sys.argv[1:]:
SP.check_call(["cargo","clean"])
if "--build" in sys.argv[1:]:
SP.check_call(["cargo","lcheck"])
SP.check_call([sys.executable, "-m", "pip", "install", "-e", ".."])
print("Build+Install took:", datetime.now() - t_start)
sys.path.append("..")
_ed_lrr = __import__("_ed_lrr")
def callback(state):
print(state)
print(_ed_lrr)
r = _ed_lrr.PyRouter(callback)
r.load("../stars_2.csv", immediate=False)
print(r)
r.str_tree_test()
exit()
r = _ed_lrr.PyRouter(callback)
r.load("../stars.csv", immediate=False)
print(r.resolve("Sol","Saggitarius A","Colonia","Merope"))
exit()
ships = _ed_lrr.PyShip.from_journal()
r = _ed_lrr.PyRouter(callback)
r.load("../stars.csv", immediate=False)
def func(*args,**kwargs):
print(kwargs)
return 12
r.precompute_neighbors(JUMP_RANGE)
exit()
# start, end = "Sol", "Colonia" # # 135 in 22m 36s 664ms 268us 800ns
"""
{'mode': 'BFS_serial', 'system': 'Nuwo OP-N c23-1', 'from': 'Sol', 'to': 'Beagle Point', 'depth': 492, 'queue_size': 1602, 'd_rem': 2456.31298828125, 'd_total': 65279.3515625, 'prc_done': 96.23722839355469, 'n_seen': 17366296, 'prc_seen': 26.25494384765625}
[0:43:19.715858] INFO | _ed_lrr.route:src\route.rs:2402 | Took: 34m 38s 40ms 256us 500ns
"""
"""
{'mode': 'BFS_serial', 'system': 'Syriae Thaa DN-B d13-2', 'from': 'Sol', 'to': 'Beagle Point', 'depth': 521, 'queue_size': 2311, 'd_rem': 492.8757019042969, 'd_total': 65279.3515625, 'prc_done': 99.2449722290039, 'n_seen': 19566797, 'prc_seen': 29.58173179626465}
[0:53:28.431326] INFO | _ed_lrr.route:src\route.rs:2402 | Took: 48m 34s 958ms 326us 300ns
"""
"""
[0:36:02.738233] INFO | _ed_lrr.route:src\route.rs:2404 | Took: 27m 6s 216ms 161us 100ns
Optimal route: 534
"""
"""
Sol, Colonia
Took: 30m 22s 63ms 818us
Allocs: 26622742
Reallocs: 45809664
Deallocs: 26622600
Optimal route: 135
"""
"""
Sol, Ix
Took: 1s 995ms 115us 100ns
Allocs: 17058
Reallocs: 32042
Deallocs: 17047
Optimal route: 4
"""
# Stats { allocations: 23257531, deallocations: 23257389, reallocations: 42747420, bytes_allocated: 179667997387, bytes_deallocated: 179667853217, bytes_reallocated: 151573742821 }
start, end = "Sol", "Colonia"
systems = r.resolve(start, end)
sys_ids = {k: v["id"] for k, v in systems.items()}
cfg = {}
cfg["mode"] = "incremental_broadening"
# input("{}>".format(os.getpid()))
route = r.route([sys_ids[start], sys_ids[end]], JUMP_RANGE, cfg, 0)
print("Optimal route:", len(route))
# cfg["mode"] = "beam_stack"
# route = r.route([sys_ids[start], sys_ids[end]], JUMP_RANGE, cfg, 0)
exit()
# bw_l = [
# 1,
# 2,
# 4,
# 8,
# 16,
# 32,
# 64,
# 128,
# 256,
# 512,
# 1024,
# 2048,
# 4096,
# 8192,
# 16384,
# 0.1,
# 0.25,
# 0.5,
# 0.75,
# 0.9,
# 0.99,
# 0,
# ]
# cfg = {
# "mode": "bfs",
# "greedyness": 0,
# }
# bw_l = [0]
# for bw in bw_l:
# ofn = "../logs/route_log_beam_{}.txt".format(bw)
# # if os.path.isfile(ofn):
# # continue
# print(ofn)
# t_start = datetime.today()
# try:
# if isinstance(bw, int):
# cfg["beam_width"] = {"absolute": bw}
# else:
# cfg["beam_width"] = {"fraction": bw}
# route = r.route([sys_ids["Sol"], sys_ids["Beagle Point"]], JUMP_RANGE, cfg, 8)
# print(route)
# except Exception as e:
# print("Error:", e)
# route = []
# dt = (datetime.today() - t_start).total_seconds()
# shutil.copy("route_log.txt", ofn)
# with open(ofn.replace(".txt", ".json"), "w") as of:
# json.dump({"route": route, "dt": dt}, of)
# g_l = [1.0, 0.99, 0.9, 0.75, 0.5, 0.25]
# g_l.clear()
# cfg["beam_width"] = 0
# for g in g_l:
# ofn = "../logs/route_log_g_{}.txt".format(g)
# if os.path.isfile(ofn):
# continue
# print(ofn)
# t_start = datetime.today()
# try:
# cfg["greedyness"] = g
# route = r.route([sys_ids["Sol"], sys_ids["Beagle Point"]], JUMP_RANGE, cfg)
# except Exception as e:
# print("Error:", e)
# route = []
# dt = (datetime.today() - t_start).total_seconds()
# shutil.copy("route_log.txt", ofn)
# with open(ofn.replace(".txt", ".json"), "w") as of:
# json.dump({"route": route, "dt": dt}, of)
# r.unload()
# exit()
# os.chdir("..")
# SP.check_call(
# [
# "conda",
# "run",
# "-n",
# "base",
# "--no-capture-output",
# "python",
# "plot_heatmap_vaex.py",
# "logs/route_log_*.txt",
# ],
# shell=True,
# )

rust/src/common.rs

@ -1,13 +1,257 @@
use crate::route::Router;
//! # Common utility functions
use crate::route::{LineCache, Router};
use bincode::Options;
use crossbeam_channel::{bounded, Receiver};
use csv::ByteRecord;
use dict_derive::IntoPyObject;
use pyo3::conversion::ToPyObject;
use eyre::Result;
use log::*;
use pyo3::prelude::*;
use pyo3::types::{PyDict, PyTuple};
use pyo3::types::PyDict;
use pyo3::{conversion::ToPyObject, create_exception};
use pythonize::depythonize;
use serde::{Deserialize, Serialize};
use std::cmp::Ordering;
use std::collections::HashMap;
use std::path::PathBuf;
use sha3::{Digest, Sha3_256};
use std::collections::{BTreeMap, HashMap, HashSet, VecDeque};
use std::hash::{Hash, Hasher, BuildHasherDefault};
use std::io::Write;
use std::ops::{Deref, DerefMut};
use std::path::Path;
use std::str::FromStr;
use std::thread;
use std::{cmp::Ordering, cmp::Reverse, collections::BinaryHeap};
use std::{
fs::File,
io::{BufReader, BufWriter},
path::PathBuf,
};
use nohash_hasher::NoHashHasher;
use thiserror::Error;
#[inline(always)]
pub fn heuristic(range: f32, node: &TreeNode, goal: &TreeNode) -> f32 {
// squared distance to the goal minus the squared (multiplier-boosted) jump range, clamped to zero
let a2 = dist2(&node.pos, &goal.pos);
let mult = node.get_mult();
let b2 = range * range * mult * mult;
return (a2 - b2).max(0.0);
}
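// Worked example with assumed values: a node 60 Ly from the goal with a 50 Ly
// jump range and a neutron-star multiplier of 4.0 gives max(0, 60^2 - 200^2) = 0,
// i.e. the goal is within one boosted jump; with mult = 1.0 it gives
// 60^2 - 50^2 = 1100.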
/// Min-heap priority queue using f32 as priority
pub struct MinFHeap<T: Ord>(pub BinaryHeap<(Reverse<F32>, T)>);
/// Max-heap priority queue using f32 as priority
pub struct MaxFHeap<T: Ord>(pub BinaryHeap<(F32, T)>);
impl<T: Ord> MaxFHeap<T> {
/// Create new, empty priority queue
pub fn new() -> Self {
MaxFHeap(BinaryHeap::new())
}
/// push value `item` with priority `w` into queue
pub fn push(&mut self, w: f32, item: T) {
self.0.push((F32(w), item))
}
/// Remove and return largest item and priority
pub fn pop(&mut self) -> Option<(f32, T)> {
self.0.pop().map(|(F32(w), item)| (w, item))
}
}
impl<T: Ord> Default for MaxFHeap<T> {
fn default() -> Self {
return MaxFHeap(BinaryHeap::new());
}
}
impl<T: Ord> Deref for MaxFHeap<T> {
type Target = BinaryHeap<(F32, T)>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<T: Ord> DerefMut for MaxFHeap<T> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl<T: Ord> MinFHeap<T> {
/// Create new, empty priority queue
pub fn new() -> Self {
MinFHeap(BinaryHeap::new())
}
/// push value `item` with priority `w` into queue
pub fn push(&mut self, w: f32, item: T) {
self.0.push((Reverse(F32(w)), item))
}
/// Remove and return smallest item and priority
pub fn pop(&mut self) -> Option<(f32, T)> {
self.0.pop().map(|(Reverse(F32(w)), item)| (w, item))
}
}
impl<T: Ord> Default for MinFHeap<T> {
fn default() -> Self {
return MinFHeap(BinaryHeap::new());
}
}
impl<T: Ord> Deref for MinFHeap<T> {
type Target = BinaryHeap<(Reverse<F32>, T)>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<T: Ord> DerefMut for MinFHeap<T> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
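// Minimal usage sketch for the heaps above (assumed values, illustrative only):
// MinFHeap pops the entry with the smallest f32 priority first.
#[allow(dead_code)]
fn min_fheap_example() {
    let mut q: MinFHeap<u32> = MinFHeap::new();
    q.push(3.5, 30);
    q.push(1.25, 10);
    q.push(2.0, 20);
    assert_eq!(q.pop(), Some((1.25, 10)));
    assert_eq!(q.pop(), Some((2.0, 20)));
}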
/// ED LRR error type
#[derive(Error, Debug)]
pub enum EdLrrError {
#[error("failed to compute route from {from:?} to {to:?}: {reason}")]
RouteError {
from: Option<System>,
to: Option<System>,
reason: String,
},
#[error("failed to find system matching {0:?}")]
ResolveError(String),
#[error("runtime error: {0:?}")]
RuntimeError(String),
#[error("Failed to process {0}")]
ProcessingError(PathBuf),
#[error(transparent)]
EvalError(#[from] eval::Error),
#[error(transparent)]
CSVError(#[from] csv::Error),
#[error(transparent)]
IOError(#[from] std::io::Error),
#[error(transparent)]
BincodeError(#[from] Box<bincode::ErrorKind>),
#[error(transparent)]
PyError(#[from] pyo3::PyErr),
#[error(transparent)]
Error(#[from] eyre::Error),
#[error("unknown error")]
Unknown,
}
pub mod py_exceptions {
use super::*;
pub use pyo3::exceptions::*;
create_exception!(_ed_lrr, RouteError, PyException);
create_exception!(_ed_lrr, ResolveError, PyException);
create_exception!(_ed_lrr, EdLrrException, PyException);
create_exception!(_ed_lrr, ProcessingError, PyException);
create_exception!(_ed_lrr, FileFormatError, PyException);
}
impl FromStr for EdLrrError {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(Self::RuntimeError(s.to_owned()))
}
}
impl std::convert::From<String> for EdLrrError {
fn from(s: String) -> Self {
Self::RuntimeError(s)
}
}
impl std::convert::From<EdLrrError> for PyErr {
fn from(err: EdLrrError) -> PyErr {
match err {
EdLrrError::PyError(e) => e,
EdLrrError::BincodeError(..) => {
py_exceptions::FileFormatError::new_err(err.to_string())
}
EdLrrError::RouteError { .. } => py_exceptions::RouteError::new_err(err.to_string()),
EdLrrError::RuntimeError(msg) => py_exceptions::PyRuntimeError::new_err(msg),
EdLrrError::ResolveError(..) => py_exceptions::PyRuntimeError::new_err(err.to_string()),
EdLrrError::EvalError(err) => py_exceptions::PyRuntimeError::new_err(err.to_string()),
EdLrrError::CSVError(err) => py_exceptions::PyRuntimeError::new_err(err.to_string()),
EdLrrError::IOError(err) => py_exceptions::PyIOError::new_err(err.to_string()),
EdLrrError::Error(err) => py_exceptions::EdLrrException::new_err(err.to_string()),
EdLrrError::ProcessingError(buf) => {
py_exceptions::ProcessingError::new_err(format!("{}", buf.display()))
}
EdLrrError::Unknown => {
py_exceptions::EdLrrException::new_err("Unknown error!".to_string())
}
}
}
}
pub type EdLrrResult<T> = Result<T, EdLrrError>;
/// f32 compare wrapper
pub fn fcmp(a: f32, b: f32) -> Ordering {
match (a, b) {
(x, y) if x.is_nan() && y.is_nan() => Ordering::Equal,
(x, _) if x.is_nan() => Ordering::Greater,
(_, y) if y.is_nan() => Ordering::Less,
(..) => a.partial_cmp(&b).unwrap(),
}
}
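// Example of the ordering fcmp imposes (illustrative): NaN sorts after every
// number and compares equal to itself, e.g. fcmp(1.0, f32::NAN) == Ordering::Less
// and fcmp(f32::NAN, f32::NAN) == Ordering::Equal.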
/// f32 wrapper type implementing `Eq` and `Ord`
#[derive(Debug)]
pub struct F32(pub f32);
impl PartialEq for F32 {
fn eq(&self, other: &F32) -> bool {
fcmp(self.0, other.0) == std::cmp::Ordering::Equal
}
}
impl Eq for F32 {}
impl PartialOrd for F32 {
fn partial_cmp(&self, other: &F32) -> Option<std::cmp::Ordering> {
Some(fcmp(self.0, other.0))
}
}
impl Ord for F32 {
fn cmp(&self, other: &F32) -> std::cmp::Ordering {
fcmp(self.0, other.0)
}
}
impl Deref for F32 {
type Target = f32;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl DerefMut for F32 {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
/// Returns additional jump range (in Ly) granted by specified class of Guardian FSD Booster
pub fn get_fsd_booster_info(class: usize) -> Result<f32, String> {
// Data from https://elite-dangerous.fandom.com/wiki/Guardian_Frame_Shift_Drive_Booster
let ret = match class {
@ -22,6 +266,7 @@ pub fn get_fsd_booster_info(class: usize) -> Result<f32, String> {
return Ok(ret);
}
/// Returns optimal mass and maximum fuel per jump for the given FSD rating and class as a hash map
pub fn get_fsd_info(rating: usize, class: usize) -> Result<HashMap<String, f32>, String> {
let mut ret = HashMap::new();
// Data from https://elite-dangerous.fandom.com/wiki/Frame_Shift_Drive#Specifications
@ -68,6 +313,7 @@ pub fn get_fsd_info(rating: usize, class: usize) -> Result<HashMap<String, f32>,
return Ok(ret);
}
/// Returns jump range multiplier for the specified star type (4 for neutron stars, 1.5 for white dwarfs and 1.0 otherwise)
pub fn get_mult(star_type: &str) -> f32 {
if star_type.contains("White Dwarf") {
return 1.5;
@ -78,133 +324,403 @@ pub fn get_mult(star_type: &str) -> f32 {
1.0
}
#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum BeamWidth {
Absolute(usize),
Fraction(f32),
Radius(f32),
Infinite,
}
impl std::fmt::Display for BeamWidth {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
BeamWidth::Absolute(n) => write!(f, "{}", n),
BeamWidth::Fraction(v) => write!(f, "{}%", (*v) * 100.0),
BeamWidth::Radius(r) => write!(f, "{} Ly", r),
BeamWidth::Infinite => write!(f, "Infinite"),
}?;
Ok(())
}
}
impl Default for BeamWidth {
fn default() -> Self {
Self::Infinite
}
}
impl FromPyObject<'_> for BeamWidth {
fn extract(ob: &PyAny) -> PyResult<Self> {
depythonize(ob).map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(format!("{}", e)))
}
}
impl BeamWidth {
pub fn is_set(&self) -> bool {
match self {
Self::Fraction(f) => *f > 0.0,
Self::Absolute(n) => *n != 0,
Self::Radius(r) => *r > 0.0,
Self::Infinite => false,
}
}
pub fn is_infinite(&self) -> bool {
matches!(self, Self::Infinite)
}
pub fn compute(&self, nodes: usize) -> usize {
match self {
Self::Fraction(f) => {
let w = (nodes as f32) * f.max(0.0).min(1.0);
return (w.ceil() as usize).max(1);
}
Self::Absolute(n) => *n,
Self::Radius(_) | Self::Infinite => nodes,
}
}
}
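// Sketch of how the beam-width variants above resolve to a node count
// (assumed values, illustrative only):
#[allow(dead_code)]
fn beam_width_example() {
    assert_eq!(BeamWidth::Absolute(128).compute(1_000_000), 128);
    assert_eq!(BeamWidth::Fraction(0.25).compute(1_000), 250);
    assert_eq!(BeamWidth::Infinite.compute(1_000), 1_000);
}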
/// Represents an unresolved system to be searched for by name, id or position
#[derive(Debug, FromPyObject)]
pub enum SysEntry {
ID(u32),
Name(String),
Pos((f32, f32, f32)),
}
impl SysEntry {
pub fn parse(s: &str) -> Self {
if let Ok(n) = s.parse() {
SysEntry::ID(n)
} else {
SysEntry::Name(s.to_owned())
impl ToPyObject for SysEntry {
fn to_object(&self, py: Python) -> PyObject {
match self {
Self::ID(id) => id.to_object(py),
Self::Name(name) => name.to_object(py),
Self::Pos(pos) => pos.to_object(py),
}
}
}
pub fn find_matches(
path: &PathBuf,
pub fn grid_stats(
path: &Path,
grid_size: f32,
) -> Result<BTreeMap<(i64, i64, i64), Vec<u32>>, String> {
let mut reader = match csv::ReaderBuilder::new().has_headers(false).from_path(path) {
Ok(rdr) => rdr,
Err(e) => {
return Err(format!("Error opening {}: {}", path.to_str().unwrap(), e));
}
};
let systems = reader.deserialize::<System>().map(Result::unwrap);
let mut ret: BTreeMap<(i64, i64, i64), Vec<u32>> = BTreeMap::new();
for sys in systems {
let k = (
((sys.pos[0] / grid_size).round() * grid_size) as i64,
((sys.pos[1] / grid_size).round() * grid_size) as i64,
((sys.pos[2] / grid_size).round() * grid_size) as i64,
);
ret.entry(k).or_default().push(sys.id);
}
Ok(ret)
}
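// Worked example with assumed values: with grid_size = 10.0, a system at
// (12.3, -4.9, 100.2) is snapped to the bucket keyed (10, 0, 100), and the ids of
// all systems falling into that bucket are collected in its Vec.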
pub enum Node {
Start,
Goal,
ID(u32),
}
pub enum Weight {
Dist(Node),
Depth,
}
impl Weight {
fn eval(&self) -> f32 {
todo!()
}
}
struct Weights(Vec<(f32, Weight)>);
impl Weights {
fn new() -> Self {
Self(vec![])
}
fn add(&mut self, w: f32, v: Weight) {
self.0.push((w, v));
}
fn eval(&mut self) -> f32 {
self.0.iter().map(|(w, v)| w * v.eval()).sum()
}
}
#[inline(always)]
pub fn dist2(p1: &[f32; 3], p2: &[f32; 3]) -> f32 {
let dx = p1[0] - p2[0];
let dy = p1[1] - p2[1];
let dz = p1[2] - p2[2];
dx * dx + dy * dy + dz * dz
}
#[inline(always)]
pub fn dist(p1: &[f32; 3], p2: &[f32; 3]) -> f32 {
dist2(p1, p2).sqrt()
}
#[inline(always)]
pub fn distm(p1: &[f32; 3], p2: &[f32; 3]) -> f32 {
let dx = (p1[0] - p2[0]).abs();
let dy = (p1[1] - p2[1]).abs();
let dz = (p1[2] - p2[2]).abs();
dx + dy + dz
}
/// Dot product (cosine of angle) between two 3D vectors
#[inline(always)]
pub fn ndot(u: &[f32; 3], v: &[f32; 3]) -> f32 {
let z: [f32; 3] = [0.0; 3];
let lm = dist(u, &z) * dist(v, &z);
(u[0] * v[0]) / lm + (u[1] * v[1]) / lm + (u[2] * v[2]) / lm
}
/// Fuzzy string matcher, used to resolve star system names
#[cfg_attr(feature = "profiling", tracing::instrument(skip(rx)))]
fn matcher(
rx: Receiver<ByteRecord>,
names: Vec<String>,
exact: bool,
) -> Result<HashMap<String, (f64, Option<System>)>, String> {
let mut best: HashMap<String, (f64, Option<System>)> = HashMap::new();
) -> HashMap<String, (f64, Option<u32>)> {
let mut best: HashMap<String, (f64, Option<u32>)> = HashMap::new();
for name in &names {
best.insert(name.to_string(), (0.0, None));
}
let names_u8: Vec<(String, _)> = names.iter().map(|n| (n.clone(), n.as_bytes())).collect();
let sdist = eddie::slice::Levenshtein::new();
for sys in rx.into_iter() {
for (name, name_b) in &names_u8 {
if let Some(ent) = best.get_mut(name) {
if (ent.0 - 1.0).abs() < std::f64::EPSILON {
continue;
}
if exact && (&sys[1] == *name_b) {
let id = std::str::from_utf8(&sys[0]).unwrap().parse().unwrap();
*ent = (1.0, Some(id));
continue;
}
let d = sdist.similarity(&sys[1], name_b);
if d > ent.0 {
let id = std::str::from_utf8(&sys[0]).unwrap().parse().unwrap();
*ent = (d, Some(id));
}
};
}
}
best
}
/// Scan through the csv file at `path` and return a hash map
/// mapping the strings from `names` to a tuple `(score, Option<system_id>)`.
/// Matching uses the normalized Levenshtein similarity, where 1.0 is an exact match.
pub fn find_matches(
path: &Path,
names: Vec<String>,
exact: bool,
) -> Result<HashMap<String, (f64, Option<u32>)>, String> {
let mut best: HashMap<String, (f64, Option<u32>)> = HashMap::new();
if names.is_empty() {
return Ok(best);
}
for name in &names {
best.insert(name.to_string(), (0.0, None));
}
let mut reader = match csv::ReaderBuilder::new().from_path(path) {
let mut workers = Vec::new();
let ncpus = num_cpus::get();
let (tx, rx) = bounded(4096 * ncpus);
for _ in 0..ncpus {
let names = names.clone();
let rx = rx.clone();
let th = thread::spawn(move || matcher(rx, names, exact));
workers.push(th);
}
let mut rdr = match csv::ReaderBuilder::new().has_headers(false).from_path(path) {
Ok(rdr) => rdr,
Err(e) => {
return Err(format!("Error opening {}: {}", path.to_str().unwrap(), e));
}
};
let systems = reader.deserialize::<SystemSerde>();
for sys in systems {
let sys = sys.unwrap();
for name in &names {
let t_start = std::time::Instant::now();
let mut processed: usize = 0;
for record in rdr.byte_records().flat_map(|v| v.ok()) {
tx.send(record).unwrap();
processed += 1;
}
drop(tx);
while let Some(th) = workers.pop() {
for (name, (score, sys)) in th.join().unwrap().iter() {
best.entry(name.clone()).and_modify(|ent| {
if (exact) && (&sys.system == name) {
*ent = (1.0, Some(sys.clone().build()))
} else {
let d1 = strsim::normalized_levenshtein(&sys.system, &name);
let d2 = strsim::normalized_levenshtein(&sys.body, &name);
if d1 > ent.0 {
*ent = (d1, Some(sys.clone().build()))
} else if d2 > ent.0 {
*ent = (d2, Some(sys.clone().build()))
}
if score > &ent.0 {
*ent = (*score, *sys);
}
});
}
}
let dt = std::time::Instant::now() - t_start;
info!(
"Searched {} records in {:?}: {} records/second",
processed,
dt,
(processed as f64) / dt.as_secs_f64()
);
Ok(best)
}
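// Hedged usage sketch (the path and name below are assumptions):
// let matches = find_matches(Path::new("stars.csv"), vec!["Sol".to_owned()], false)?;
// matches["Sol"] is then a (score, Option<id>) pair, with a score of 1.0 for an
// exact name match.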
/// Hash the contents of `path` with sha3 and return the hash as a vector of bytes
fn hash_file(path: &Path) -> Vec<u8> {
let mut hash_reader = BufReader::new(File::open(path).unwrap());
let mut hasher = Sha3_256::new();
std::io::copy(&mut hash_reader, &mut hasher).unwrap();
hasher.finalize().iter().copied().collect()
}
/// Construct an `O(1)` lookup index for the csv file at `path`.
/// The structure of the index is `(sha3_hash, Vec<u8>)`,
/// where the first element is the sha3 hash of the file the index belongs to,
/// followed by a delta-encoded vector: entry 0 is 0 and entry `i` is the length in bytes
/// of csv line `i - 1`, so the prefix sum up to entry `i` is the file offset of line `i`.
pub fn build_index(path: &Path) -> std::io::Result<()> {
let file_hash = hash_file(path);
let mut wtr = BufWriter::new(File::create(path.with_extension("idx"))?);
let mut idx: Vec<u8> = Vec::new();
let mut records = (csv::ReaderBuilder::new()
.has_headers(false)
.from_path(path)?)
.into_deserialize::<System>();
let mut n: usize = 0;
let mut size;
idx.push(0);
loop {
n += 1;
if n % 100000 == 0 {
info!("{} Bodies processed", n);
}
let new_pos = records.reader().position().byte();
if records.next().is_none() {
break;
}
size = records.reader().position().byte() - new_pos;
idx.push(size as u8);
}
assert_eq!(idx.len(), n);
bincode::serialize_into(&mut wtr, &(file_hash, idx)).unwrap();
Ok(())
}
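// Sketch of the reverse mapping (illustrative only): the byte offset of line `i`
// is the prefix sum of the first `i + 1` index entries (the leading 0 plus the
// lengths of lines 0..i).
#[allow(dead_code)]
fn offset_of_line(deltas: &[u8], line: usize) -> u64 {
    deltas[..=line].iter().map(|&d| u64::from(d)).sum()
}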
/// Node for R*-Tree
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub struct TreeNode {
/// System ID
pub id: u32,
/// Position in space
pub pos: [f32; 3],
pub mult: f32,
/// flags
/// 00 unscoopable
/// 01 scoopable
/// 10 white dwarf
/// 11 neutron star
pub flags: u8,
}
impl ToPyObject for TreeNode {
fn to_object(&self, py: Python) -> PyObject {
pythonize::pythonize(py, self).unwrap()
}
}
impl TreeNode {
pub fn get(&self, router: &Router) -> Option<System> {
let mut cache = router.cache.as_ref().unwrap().lock().unwrap();
cache.get(self.id)
/// Retrieve matching [System] for this tree node
pub fn get(&self, router: &Router) -> Result<Option<System>, String> {
router.get(self.id)
}
pub fn get_mult(&self) -> f32 {
match self.flags {
0b11 => 4.0,
0b10 => 1.5,
_ => 1.0
}
}
}
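// Worked example of the flag encoding above (assumed values): a neutron star is
// stored as flags = 0b11 and yields a 4.0x multiplier from get_mult, a white
// dwarf as 0b10 -> 1.5x, and scoopable (0b01) or unscoopable (0b00) stars fall
// through to 1.0x.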
impl PartialEq for TreeNode {
fn eq(&self, other: &Self) -> bool {
self.id == other.id
}
}
impl Eq for TreeNode {}
impl PartialOrd for TreeNode {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.id.cmp(&other.id))
}
}
impl Ord for TreeNode {
fn cmp(&self, other: &TreeNode) -> Ordering {
self.id.cmp(&other.id)
}
}
impl Hash for TreeNode {
fn hash<H: Hasher>(&self, state: &mut H) {
self.id.hash(state);
}
}
/// Star system info read from CSV
#[derive(Debug, Clone, Serialize, Deserialize, IntoPyObject)]
pub struct SystemSerde {
pub id: u32,
pub star_type: String,
pub system: String,
pub body: String,
pub mult: f32,
pub distance: f32,
pub x: f32,
pub y: f32,
pub z: f32,
}
impl SystemSerde {
pub fn build(self) -> System {
System {
id: self.id,
star_type: self.star_type,
system: self.system,
body: self.body,
mult: self.mult,
distance: self.distance,
pos: [self.x, self.y, self.z],
}
}
pub fn to_node(&self) -> TreeNode {
TreeNode {
id: self.id,
pos: [self.x, self.y, self.z],
mult: self.mult,
}
}
}
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct System {
/// Unique System id
pub id: u32,
pub star_type: String,
pub system: String,
pub body: String,
/// Star system
pub name: String,
/// Number of bodies
pub num_bodies: u8,
/// Does the system have a scoopable star?
pub has_scoopable: bool,
/// Jump range multiplier (1.5 for white dwarfs, 4.0 for neutron stars, 1.0 otherwise)
pub mult: f32,
pub distance: f32,
/// Position
pub pos: [f32; 3],
}
impl System {
fn get_flags(&self) -> u8 {
// map star properties onto the 2-bit flag encoding used by TreeNode
if self.mult == 4.0 {
return 0b11; // neutron star
}
if self.mult == 1.5 {
return 0b10; // white dwarf
}
if self.has_scoopable {
return 0b01; // scoopable
}
0b00 // unscoopable
}
}
impl ToPyObject for System {
fn to_object(&self, py: Python) -> PyObject {
let pos = PyTuple::new(py, self.pos.iter());
let elem = PyDict::new(py);
elem.set_item("star_type", self.star_type.clone()).unwrap();
elem.set_item("system", self.system.clone()).unwrap();
elem.set_item("body", self.body.clone()).unwrap();
elem.set_item("distance", self.distance).unwrap();
elem.set_item("mult", self.mult).unwrap();
elem.set_item("id", self.id).unwrap();
elem.set_item("pos", pos).unwrap();
elem.to_object(py)
let d = PyDict::new(py);
d.set_item("id", self.id).unwrap();
d.set_item("name", self.name.clone()).unwrap();
d.set_item("num_bodies", self.num_bodies).unwrap();
d.set_item("has_scoopable", self.has_scoopable).unwrap();
d.set_item("mult", self.mult).unwrap();
d.set_item("pos", (self.pos[0], self.pos[1], self.pos[2]))
.unwrap();
return d.to_object(py);
}
}
@ -213,7 +729,7 @@ impl System {
TreeNode {
id: self.id,
pos: self.pos,
mult: self.mult,
flags: self.get_flags(),
}
}
}
@ -229,3 +745,118 @@ impl PartialOrd for System {
Some(self.cmp(other))
}
}
#[derive(Debug)]
pub struct DQueue<T>(Vec<VecDeque<T>>);
impl<T> DQueue<T> {
pub fn new() -> Self {
Self(vec![])
}
pub fn enqueue(&mut self, depth: usize, item: T) {
// grow (never shrink) the per-depth queues so index `depth` is valid
if self.0.len() <= depth {
self.0.resize_with(depth + 1, VecDeque::new);
}
self.0[depth].push_back(item);
}
pub fn dequeue(&mut self, depth: usize) -> Option<T> {
if self.0.len() <= depth {
self.0.resize_with(depth + 1, VecDeque::new);
}
self.0[depth].pop_back()
}
}
impl<T> Default for DQueue<T> {
fn default() -> Self {
Self::new()
}
}
#[derive(Debug, Default, Serialize, Deserialize)]
struct BKTreeNode {
ids: HashSet<u32, BuildHasherDefault<NoHashHasher<u32>>>,
children: HashMap<u8, Self, BuildHasherDefault<NoHashHasher<u8>>>,
}
impl BKTreeNode {
fn new(data: &[String], dist: &eddie::str::Levenshtein) -> Self {
let mut tree = Self::default();
let mut max_depth = 0;
for id in 0..data.len() {
max_depth = max_depth.max(tree.insert(data, id as u32, dist, 0));
if (id > 0) && (id % 100_000 == 0) {
println!("Inserting ID {}, Max Depth: {}", id, max_depth);
}
}
println!("Max Depth: {}", max_depth);
tree
}
fn from_id(id: u32) -> Self {
let mut ret = Self::default();
ret.ids.insert(id);
return ret;
}
fn insert(&mut self, data: &[String], id: u32, dist: &eddie::str::Levenshtein, depth: usize) -> usize {
if self.is_empty() {
self.ids.insert(id);
return depth;
}
let idx = self.get_id().unwrap() as usize;
let self_key = data.get(idx).unwrap();
let ins_key = data.get(id as usize).unwrap();
let dist_key = dist.distance(self_key, ins_key) as u8;
if dist_key == 0 {
self.ids.insert(id);
return depth;
}
if let Some(child) = self.children.get_mut(&dist_key) {
return child.insert(data, id, dist, depth + 1);
} else {
self.children.insert(dist_key, Self::from_id(id));
return depth;
}
}
fn get_id(&self) -> Option<u32> {
self.ids.iter().copied().next()
}
fn is_empty(&self) -> bool {
return self.ids.is_empty();
}
}
#[derive(Debug, Serialize, Deserialize)]
pub struct BKTree {
base_id: u32,
root: BKTreeNode,
}
impl BKTree {
pub fn new(data: &[String], base_id: u32) -> Self {
let dist = eddie::str::Levenshtein::new();
let root = BKTreeNode::new(data, &dist);
Self {base_id,root}
}
pub fn id(&self) -> u32 {
self.base_id
}
pub fn dump(&self, fh: &mut BufWriter<File>) -> EdLrrResult<()> {
let options = bincode::DefaultOptions::new();
let amt = options.serialized_size(self)?;
println!("Writing {}",amt);
options.serialize_into(fh,self)?;
Ok(())
}
pub fn lookup(&self, name: &str) -> u32 {
todo!();
}
}
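// Sketch of the BK-tree idea the structures above encode (lookup itself is still
// a todo!()): each child edge is labelled with the Levenshtein distance to its
// parent's key, so a query within tolerance `t` of a node at distance `d` only
// needs to descend into children whose edge labels lie in [d - t, d + t].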

0
rust/src/dot_impls.rs Normal file

View file

@ -1,196 +0,0 @@
use crate::common::get_mult;
use crate::common::SystemSerde;
use fnv::FnvHashMap;
use pyo3::prelude::*;
use serde::Deserialize;
use serde_json::Result;
use std::fs::File;
use std::io::Seek;
use std::io::{BufRead, BufReader, BufWriter, SeekFrom};
use std::path::PathBuf;
use std::str;
use std::time::Instant;
#[derive(Debug, Deserialize)]
#[allow(non_snake_case)]
struct Body {
name: String,
subType: String,
#[serde(rename = "type")]
body_type: String,
systemId: i32,
systemId64: i64,
#[serde(rename = "distanceToArrival")]
distance: f32,
}
#[derive(Debug, Deserialize)]
struct Coords {
x: f32,
y: f32,
z: f32,
}
#[derive(Debug, Deserialize)]
struct System {
id: i32,
id64: i64,
name: String,
coords: Coords,
}
#[derive(Debug)]
pub struct PreprocessState {
pub file: String,
pub message: String,
pub total: u64,
pub done: u64,
pub count: usize,
}
fn process(
path: &PathBuf,
func: &mut dyn for<'r> FnMut(&'r str) -> (),
callback: &dyn Fn(&PreprocessState) -> PyResult<PyObject>,
) -> std::io::Result<()> {
let mut buffer = String::new();
let fh = File::open(path)?;
let total_size = fh.metadata()?.len();
let mut t_last = Instant::now();
let mut reader = BufReader::new(fh);
let mut state = PreprocessState {
file: path.to_str().unwrap().to_owned(),
total: total_size,
done: 0,
count: 0,
message: format!("Processing {} ...", path.to_str().unwrap()),
};
println!("Loading {} ...", path.to_str().unwrap());
while let Ok(n) = reader.read_line(&mut buffer) {
if n == 0 {
break;
}
buffer = buffer.trim_end().trim_end_matches(|c| c == ',').to_string();
if !buffer.is_empty() {
func(&buffer);
}
let pos = reader.seek(SeekFrom::Current(0)).unwrap();
state.done = pos;
state.count += 1;
if t_last.elapsed().as_millis() > 100 {
callback(&state)?;
t_last = Instant::now();
}
buffer.clear();
}
Ok(())
}
fn process_systems(
path: &PathBuf,
callback: &dyn Fn(&PreprocessState) -> PyResult<PyObject>,
) -> FnvHashMap<i32, System> {
let mut ret = FnvHashMap::default();
process(
path,
&mut |line| {
let sys_res: Result<System> = serde_json::from_str(&line);
if let Ok(sys) = sys_res {
ret.insert(sys.id, sys);
} else {
eprintln!("\nError parsing: {}\n\t{:?}\n", line, sys_res.unwrap_err());
}
},
callback,
)
.unwrap();
ret
}
pub fn build_index(path: &PathBuf) -> std::io::Result<()> {
let mut wtr = BufWriter::new(File::create(path.with_extension("idx"))?);
let mut idx: Vec<u64> = Vec::new();
let mut records = (csv::Reader::from_path(path)?).into_deserialize::<SystemSerde>();
loop {
idx.push(records.reader().position().byte());
if records.next().is_none() {
break;
}
}
bincode::serialize_into(&mut wtr, &idx).unwrap();
Ok(())
}
fn process_bodies(
path: &PathBuf,
out_path: &PathBuf,
systems: &mut FnvHashMap<i32, System>,
callback: &dyn Fn(&PreprocessState) -> PyResult<PyObject>,
) -> std::io::Result<()> {
println!(
"Processing {} into {} ...",
path.to_str().unwrap(),
out_path.to_str().unwrap(),
);
let mut n: u32 = 0;
let mut wtr = csv::Writer::from_path(out_path)?;
process(
path,
&mut |line| {
if !line.contains("Star") {
return;
}
let body_res: Result<Body> = serde_json::from_str(&line);
if let Ok(body) = body_res {
if !body.body_type.contains("Star") {
return;
}
if let Some(sys) = systems.get(&body.systemId) {
let sub_type = body.subType;
let mult = get_mult(&sub_type);
let sys_name = sys.name.clone();
let rec = SystemSerde {
id: n,
star_type: sub_type,
system: sys_name,
body: body.name,
mult,
distance: body.distance,
x: sys.coords.x,
y: sys.coords.y,
z: sys.coords.z,
};
wtr.serialize(rec).unwrap();
n += 1;
};
} else {
eprintln!("\nError parsing: {}\n\t{:?}\n", line, body_res.unwrap_err());
}
},
callback,
)
.unwrap();
println!("Total Systems: {}", n);
systems.clear();
Ok(())
}
pub fn preprocess_files(
bodies: &PathBuf,
systems: &PathBuf,
out_path: &PathBuf,
callback: &dyn Fn(&PreprocessState) -> PyResult<PyObject>,
) -> std::io::Result<()> {
if !out_path.exists() {
let mut systems = process_systems(systems, &callback);
process_bodies(bodies, out_path, &mut systems, &callback)?;
} else {
println!(
"File '{}' exists, not overwriting it",
out_path.to_str().unwrap()
);
}
println!("Building index...");
println!("Index result: {:?}", build_index(&out_path));
Ok(())
}

rust/src/galaxy.rs

@ -1,40 +1,25 @@
use serde::{Deserialize, Serialize};
//! Spansh galaxy.json to csv converter
use crate::common::{get_mult, System};
use eyre::Result;
use flate2::bufread::GzDecoder;
use log::*;
use serde::Deserialize;
use std::fs::File;
use std::io::{BufRead, BufReader, BufWriter};
use std::io::{BufRead, BufReader, BufWriter, Seek};
use std::path::Path;
use std::str;
#[derive(Debug, Clone, Serialize)]
pub struct SystemSerde {
pub id: u32,
pub star_type: String,
pub system: String,
pub body: String,
pub mult: f32,
pub distance: f32,
pub x: f32,
pub y: f32,
pub z: f32,
}
fn get_mult(star_type: &str) -> f32 {
if star_type.contains("White Dwarf") {
return 1.5;
}
if star_type.contains("Neutron") {
return 4.0;
}
1.0
}
#[derive(Debug, Deserialize)]
struct Coords {
#[derive(Debug, Deserialize, Clone)]
struct GalaxyCoords {
x: f32,
y: f32,
z: f32,
}
#[derive(Debug, Deserialize)]
struct Body {
#[derive(Debug, Deserialize, Clone)]
struct GalaxyBody {
name: String,
#[serde(rename = "type")]
body_type: String,
@ -44,70 +29,73 @@ struct Body {
distance: f32,
}
#[derive(Debug, Deserialize)]
struct System {
coords: Coords,
#[derive(Debug, Deserialize, Clone)]
struct GalaxySystem {
coords: GalaxyCoords,
name: String,
bodies: Vec<Body>,
bodies: Vec<GalaxyBody>,
}
pub fn process_galaxy_dump(path: &str) -> std::io::Result<()> {
let fh = File::create("stars.csv")?;
let mut wtr = csv::Writer::from_writer(BufWriter::new(fh));
/// Load compressed galaxy.json from `path` and write `stars.csv` to `out_path`
pub fn process_galaxy_dump(path: &Path, out_path: &Path) -> Result<()> {
let out_path = out_path.with_extension("csv");
let mut wtr = csv::WriterBuilder::new()
.has_headers(false)
.from_writer(BufWriter::new(File::create(out_path)?));
let mut buffer = String::new();
let mut bz2_reader = std::process::Command::new("7z")
.args(&["x", "-so", path])
.stdout(std::process::Stdio::piped())
.spawn()
.unwrap_or_else(|err| {
eprintln!("Failed to run 7z: {}", err);
eprintln!("Falling back to bzip2");
std::process::Command::new("bzip2")
.args(&["-d", "-c", path])
.stdout(std::process::Stdio::piped())
.spawn()
.expect("Failed to execute bzip2!")
});
let mut reader = BufReader::new(
bz2_reader
.stdout
.as_mut()
.expect("Failed to open stdout of child process"),
);
let mut count = 0;
let rdr = BufReader::new(File::open(path)?);
let mut reader = BufReader::new(GzDecoder::new(rdr));
let mut count: usize = 0;
let mut total: usize = 0;
let mut errors: usize = 0;
let mut bodies: usize = 0;
let mut systems = 0;
let max_len = File::metadata(reader.get_ref().get_ref().get_ref())?.len();
while let Ok(n) = reader.read_line(&mut buffer) {
if n == 0 {
break;
}
buffer = buffer
.trim()
.trim_end_matches(|c| c == ',')
.trim()
.trim_end_matches(|c: char| c == ',' || c.is_whitespace())
.to_string();
if !buffer.contains("Star") {
continue;
};
if let Ok(sys) = serde_json::from_str::<System>(&buffer) {
for b in &sys.bodies {
if b.body_type == "Star" {
let s = SystemSerde {
id: count,
star_type: b.sub_type.clone(),
distance: b.distance,
mult: get_mult(&b.sub_type),
body: b.name.clone(),
system: sys.name.clone(),
x: sys.coords.x,
y: sys.coords.y,
z: sys.coords.z,
};
wtr.serialize(s)?;
count += 1;
total += 1;
if let Ok(sys) = serde_json::from_str::<GalaxySystem>(&buffer) {
let mut sys_rec = System {
id: systems,
mult: 1.0,
name: sys.name,
num_bodies: 0,
pos: [sys.coords.x, sys.coords.y, sys.coords.z],
has_scoopable: false,
};
for b in sys.bodies.iter().filter(|b| b.body_type == "Star").cloned() {
sys_rec.mult = sys_rec.mult.max(get_mult(&b.sub_type));
sys_rec.num_bodies += 1;
for c in "KGBFOAM".chars() {
if b.sub_type.starts_with(c) {
sys_rec.has_scoopable |= true;
break;
}
}
}
bodies += sys_rec.num_bodies as usize;
systems += 1;
count += 1;
wtr.serialize(sys_rec)?;
if count % 100_000 == 0 {
let cur_pos = reader.get_ref().get_ref().get_ref().stream_position()?;
let prc: f64 = ((cur_pos as f64) / (max_len as f64)) * 100.0;
info!("[{:.2} %] {} systems written", prc, count);
}
} else {
errors += 1;
}
buffer.clear();
}
println!("Total: {}", count);
info!("Total: {}", total);
info!("Bodies: {}", bodies);
info!("Systems: {}", systems);
info!("Processed: {}", count);
info!("Errors: {}", errors);
Ok(())
}
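// Hedged usage sketch (paths are assumptions):
// process_galaxy_dump(Path::new("galaxy.json.gz"), Path::new("stars.csv"))?;
// A system is flagged scoopable if any of its stars has a sub_type starting with
// one of K, G, B, F, O, A or M, and `mult` keeps the highest multiplier of any
// star in the system (4.0 neutron star, 1.5 white dwarf, 1.0 otherwise).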

rust/src/journal.rs

@ -1,6 +1,8 @@
//! Elite: Dangerous Journal Loadout event parser
use crate::common::get_fsd_info;
use crate::ship::Ship;
use eyre::Result;
use regex::Regex;
use serde::Deserialize;
use std::collections::HashMap;
@ -11,23 +13,23 @@ pub struct Event {
pub event: EventData,
}
#[serde(tag = "event")]
#[derive(Clone, Debug, PartialEq, Deserialize)]
#[serde(tag = "event")]
pub enum EventData {
Loadout(Loadout),
#[serde(other)]
Unknown,
}
#[serde(rename_all = "PascalCase")]
#[derive(Clone, Debug, PartialEq, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct Modifier {
label: String,
value: f32,
}
#[serde(rename_all = "PascalCase")]
#[derive(Clone, Debug, PartialEq, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct Engineering {
modifiers: Vec<Modifier>,
}
@ -72,55 +74,46 @@ impl Engineering {
impl Loadout {
fn get_booster(&self) -> Option<usize> {
self.modules
.iter()
.cloned()
.filter_map(|m| {
let Module { item, .. } = m;
if item.starts_with("int_guardianfsdbooster") {
return item
.chars()
.last()
.unwrap()
.to_digit(10)
.map(|v| v as usize);
}
return None;
})
.next()
self.modules.iter().cloned().find_map(|m| {
let Module { item, .. } = m;
if item.starts_with("int_guardianfsdbooster") {
return item
.chars()
.last()
.unwrap()
.to_digit(10)
.map(|v| v as usize);
}
return None;
})
}
fn get_fsd(&self) -> Option<(String, Option<Engineering>)> {
self.modules
.iter()
.cloned()
.filter_map(|m| {
let Module {
slot,
engineering,
item,
} = m;
if slot == "FrameShiftDrive" {
return Some((item, engineering));
}
return None;
})
.next()
self.modules.iter().cloned().find_map(|m| {
let Module {
slot,
engineering,
item,
} = m;
if slot == "FrameShiftDrive" {
return Some((item, engineering));
}
return None;
})
}
pub fn try_into_ship(self) -> Result<Ship, String> {
pub fn try_into_ship(self) -> Result<(String, Ship), String> {
let fsd = self.get_fsd().ok_or("No FSD found!")?;
let booster = self.get_booster().unwrap_or(0);
let fsd_type = Regex::new(r"^int_hyperdrive_size(\d+)_class(\d+)$")
.unwrap()
.captures(&fsd.0);
let fsd_type: (usize, usize) = fsd_type
.map(|m| {
.and_then(|m| {
let s = m.get(1)?.as_str().to_owned().parse().ok()?;
let c = m.get(2)?.as_str().to_owned().parse().ok()?;
return Some((c, s));
})
.flatten()
.ok_or(format!("Invalid FSD found: {}", &fsd.0))?;
let eng = fsd
.1
@ -141,18 +134,28 @@ impl Loadout {
let opt_mass = fsd_info
.get("FSDOptimalMass")
.ok_or(format!("Unknwon FSDOptimalMass for FSD: {}", &fsd.0))?;
return Ship::new(
self.ship_name,
let key = format!(
"[{}] {} ({})",
if !self.ship_name.is_empty() {
self.ship_name
} else {
"<NO NAME>".to_owned()
},
self.ship_ident,
self.ship,
self.unladen_mass,
self.fuel_capacity.main,
self.fuel_capacity.main,
fsd_type,
*max_fuel,
*opt_mass,
booster,
self.ship
);
return Ok((
key,
Ship::new(
self.unladen_mass,
self.fuel_capacity.main,
self.fuel_capacity.main,
fsd_type,
*max_fuel,
*opt_mass,
booster,
)?,
));
}
}

rust/src/lib.rs

@ -1,144 +1,379 @@
// #![deny(warnings)]
#![allow(dead_code, clippy::needless_return, clippy::too_many_arguments)]
//! # Elite: Dangerous Long Range Router
pub mod common;
pub mod edsm;
pub mod galaxy;
pub mod journal;
pub mod mmap_csv;
#[cfg(feature = "profiling")]
pub mod profiling;
pub mod route;
pub mod search_algos;
pub mod ship;
use bincode::Options;
use csv::{Position, StringRecord};
use eddie::Levenshtein;
// =========================
use stats_alloc::{Region, StatsAlloc, INSTRUMENTED_SYSTEM};
use std::alloc::System as SystemAlloc;
use std::cell::RefMut;
use std::collections::BTreeMap;
use std::io::{BufWriter, Write};
use std::path::Path;
use std::time::Instant;
#[cfg(not(feature = "profiling"))]
#[global_allocator]
static GLOBAL: &StatsAlloc<SystemAlloc> = &INSTRUMENTED_SYSTEM;
// =========================
#[cfg(not(feature = "profiling"))]
mod profiling {
pub fn init() {}
}
extern crate derivative;
use crate::common::{find_matches, SysEntry};
use crate::common::{find_matches, grid_stats, EdLrrError, SysEntry, System};
#[cfg(feature = "profiling")]
use crate::profiling::*;
use crate::route::{Router, SearchState};
use crate::ship::Ship;
use eyre::Result;
#[cfg(not(feature = "profiling"))]
use log::*;
use pyo3::exceptions::*;
use pyo3::prelude::*;
use pyo3::types::{PyDict, PyList, PyTuple};
use pyo3::PyObjectProtocol;
use std::path::PathBuf;
use pyo3::types::{IntoPyDict, PyDict, PyTuple};
use pyo3::{create_exception, PyObjectProtocol};
use route::{LineCache, PyModeConfig};
use std::{
cell::RefCell, collections::HashMap, convert::TryInto, fs::File, io::BufReader, path::PathBuf,
};
#[cfg(feature = "profiling")]
#[global_allocator]
static GLOBAL: ProfiledAllocator<std::alloc::System> =
ProfiledAllocator::new(std::alloc::System, 1024);
create_exception!(_ed_lrr, RoutingError, PyException);
create_exception!(_ed_lrr, ProcessingError, PyException);
create_exception!(_ed_lrr, ResolveError, PyException);
#[derive(Debug)]
enum RangeOrShip {
Range(f32),
Ship(Ship),
}
impl FromPyObject<'_> for RangeOrShip {
fn extract(ob: &PyAny) -> PyResult<Self> {
if let Ok(n) = ob.extract() {
return Ok(Self::Range(n));
}
let s: PyShip = ob.extract()?;
return Ok(Self::Ship(s.ship));
}
}
#[pyclass(dict)]
#[derive(Debug)]
#[text_signature = "(callback, /)"]
#[pyo3(text_signature = "(callback, /)")]
struct PyRouter {
router: Router,
primary_only: bool,
stars_path: String,
stars_path: Option<String>,
}
impl PyRouter {
fn check_stars(&self) -> PyResult<PathBuf> {
self.stars_path
.as_ref()
.ok_or_else(|| PyErr::from(EdLrrError::RuntimeError("no stars.csv loaded".to_owned())))
.map(PathBuf::from)
}
}
#[pymethods]
impl PyRouter {
#[new]
#[args(callback = "None")]
fn new(callback: Option<PyObject>, py: Python<'static>) -> PyResult<Self> {
Ok(PyRouter {
router: Router::new(Box::new(
move |state: &SearchState| {
match callback.as_ref() {
Some(cb) => cb.call(py, (state.clone(),), None),
None => Ok(py.None()),
}
fn new(callback: Option<PyObject>) -> Self {
let mut router = Router::new();
if callback.is_some() {
router.set_callback(Box::new(move |state: &SearchState| {
let gil_guard = Python::acquire_gil();
let py = gil_guard.python();
match callback.as_ref() {
Some(cb) => cb.call(py, (state.clone(),), None),
None => Ok(py.None()),
}
)),
primary_only: false,
stars_path: String::from(""),
})
}))
}
PyRouter {
router,
stars_path: None,
}
}
#[text_signature = "(ship, /)"]
fn set_ship(&mut self, py: Python, ship: &PyShip) -> PyResult<PyObject> {
self.router.set_ship(ship.ship.clone());
#[args(primary_only = "false", immediate = "false")]
#[pyo3(text_signature = "(path, primary_only, /)")]
fn load(&mut self, path: String, py: Python, immediate: bool) -> PyResult<PyObject> {
self.stars_path = Some(path);
if immediate {
let stars_path = self.check_stars()?;
let route_res = self.router.load(&stars_path);
if let Err(err_msg) = route_res {
return Err(PyErr::new::<PyValueError, _>(err_msg));
};
}
Ok(py.None())
}
#[args(primary_only = "false")]
#[text_signature = "(path, primary_only, /)"]
fn load(
&mut self,
path: String,
primary_only: bool,
py: Python,
) -> PyResult<PyObject> {
self.stars_path = path;
self.primary_only = primary_only;
#[pyo3(text_signature = "(/)")]
fn unload(&mut self, py: Python) -> PyObject {
self.router.unload();
py.None()
}
fn plot(&mut self, py: Python) -> PyResult<PyObject> {
let stars_path = self.check_stars()?;
let route_res = self.router.load(&stars_path);
if let Err(err_msg) = route_res {
return Err(PyErr::new::<PyValueError, _>(err_msg));
};
let mut max_v = [0f32, 0f32, 0f32];
let mut min_v = [0f32, 0f32, 0f32];
for node in self.router.get_tree().iter() {
for i in 0..3 {
if node.pos[i] > max_v[i] {
max_v[i] = node.pos[i];
}
if node.pos[i] < min_v[i] {
min_v[i] = node.pos[i];
}
}
}
let plot_bbox: ((f32, f32), (f32, f32)) = ((min_v[0], max_v[0]), (min_v[2], max_v[2]));
Ok(plot_bbox.to_object(py))
}
fn run_bfs(&mut self, range: f32, py: Python) -> PyResult<PyObject> {
let stars_path = self.check_stars()?;
let route_res = self.router.load(&stars_path);
if let Err(err_msg) = route_res {
return Err(PyErr::new::<PyValueError, _>(err_msg));
};
self.router
.precomp_bfs(range)
.map_err(PyErr::new::<RoutingError, _>)
.map(|_| py.None())
}
fn precompute_graph(&mut self, range: f32, py: Python) -> PyResult<PyObject> {
let stars_path = self.check_stars()?;
let route_res = self.router.load(&stars_path);
if let Err(err_msg) = route_res {
return Err(PyErr::new::<PyValueError, _>(err_msg));
};
self.router
.precompute_graph(range)
.map_err(PyErr::new::<RoutingError, _>)
.map(|_| py.None())
}
fn nb_perf_test(&mut self, range: f32, py: Python) -> PyResult<PyObject> {
let stars_path = self.check_stars()?;
let route_res = self.router.load(&stars_path);
if let Err(err_msg) = route_res {
return Err(PyErr::new::<PyValueError, _>(err_msg));
};
let mut nbmap = BTreeMap::new();
let tree = self.router.get_tree();
let total_nodes = tree.size();
for (n, node) in tree.iter().enumerate() {
let nbs = self
.router
.neighbours(node, range)
.map(|nb| nb.id)
.collect::<Vec<_>>();
nbmap.insert(node.id, nbs);
if n % 100_000 == 0 {
println!("{}/{}", n, total_nodes);
}
}
println!("{}", nbmap.len());
Ok(py.None())
}
#[args(greedyness = "0.5", num_workers = "0", beam_width = "0")]
#[text_signature = "(hops, range, greedyness, beam_width, num_workers, /)"]
fn precompute_neighbors(&mut self, range: f32, py: Python) -> PyResult<PyObject> {
let stars_path = self.check_stars()?;
let route_res = self.router.load(&stars_path);
if let Err(err_msg) = route_res {
return Err(PyErr::new::<PyValueError, _>(err_msg));
};
self.router
.precompute_all(range)
.map_err(PyErr::new::<RoutingError, _>)
.map(|_| py.None())
}
#[args(
greedyness = "0.5",
max_dist = "0.0",
num_workers = "0",
beam_width = "BeamWidth::Absolute(0)"
)]
#[pyo3(text_signature = "(hops, range, mode, num_workers, /)")]
fn route(
&mut self,
hops: &PyList,
range: Option<f32>,
greedyness: f32,
beam_width: usize,
hops: Vec<SysEntry>,
range: RangeOrShip,
mode: Option<PyModeConfig>,
num_workers: usize,
py: Python,
) -> PyResult<PyObject> {
let route_res = self
.router
.load(&PathBuf::from(self.stars_path.clone()), self.primary_only);
) -> PyResult<Vec<common::System>> {
let stars_path = self.check_stars()?;
let route_res = self.router.load(&stars_path);
if let Err(err_msg) = route_res {
return Err(PyErr::new::<ValueError, _>(err_msg));
return Err(PyErr::new::<PyValueError, _>(err_msg));
};
let mut sys_entries: Vec<SysEntry> = Vec::new();
for hop in hops {
if let Ok(id) = hop.extract() {
sys_entries.push(SysEntry::ID(id));
} else {
sys_entries.push(SysEntry::parse(hop.extract()?));
}
}
println!("Resolving systems...");
let ids: Vec<u32> = match resolve(&sys_entries, &self.router.path) {
Ok(ids) => ids,
info!("Resolving systems...");
let ids: Vec<u32> = match resolve(&hops, &self.router.path, true) {
Ok(systems) => systems.into_iter().map(|id| id.into_id()).collect(),
Err(err_msg) => {
return Err(PyErr::new::<ValueError, _>(err_msg));
return Err(EdLrrError::ResolveError(err_msg).into());
}
};
match self
.router
.compute_route(&ids, range, greedyness, beam_width, num_workers)
{
Ok(route) => {
let py_route: Vec<_> = route.iter().map(|hop| hop.to_object(py)).collect();
Ok(py_route.to_object(py))
let mut is_default = false;
let mut is_ship = false;
info!("{:?}", mode);
let mut mode = match mode {
Some(mode) => mode,
None => {
let mode = PyModeConfig::default();
is_default = true;
mode
}
};
if mode.mode.is_empty() {
if mode.ship.is_none() {
mode.mode = "bfs".to_string();
} else {
mode.mode = "ship".to_string();
if mode.ship_mode == *"" {
mode.ship_mode = "jumps".to_string();
}
}
Err(err_msg) => Err(PyErr::new::<RuntimeError, _>(err_msg)),
}
let range = match range {
RangeOrShip::Range(r) => Some(r),
RangeOrShip::Ship(ship) => {
mode.mode = "ship".into();
mode.ship = Some(ship);
is_ship = true;
None
}
};
info!("{:?}", mode);
let mode = mode.try_into()?;
if is_default && !is_ship {
warn!("no mode specified, defaulting to {}", mode);
}
#[cfg(not(feature = "profiling"))]
let reg = Region::new(GLOBAL);
let res = match self.router.compute_route(&ids, range, mode, num_workers) {
Ok(route) => Ok(route),
Err(err_msg) => Err(PyErr::new::<RoutingError, _>(err_msg)),
};
#[cfg(not(feature = "profiling"))]
println!("{:?}", reg.change());
return res;
}
fn perf_test(&self, callback: PyObject, py: Python) -> PyResult<PyObject> {
use common::TreeNode;
let node = TreeNode {
pos: [-65.21875, 7.75, -111.03125],
flags: 1,
id: 0,
};
let goal = TreeNode {
pos: [-9530.5, -910.28125, 19808.125],
flags: 1,
id: 1,
};
let kwargs = vec![("goal", goal), ("node", node)].into_py_dict(py);
let mut n: usize = 0;
let mut d: f64 = 0.0;
let num_loops = 10_000_000;
loop {
let pool = unsafe { Python::new_pool(py) };
let t_start = std::time::Instant::now();
for _ in 0..num_loops {
let val: f64 = callback.call(py, (), Some(kwargs))?.extract(py)?;
}
d += t_start.elapsed().as_secs_f64();
drop(pool);
n += num_loops;
let dt = std::time::Duration::from_secs_f64(d / (n as f64));
println!("{}: {:?}", n, dt);
}
Ok(py.None())
}
#[args(grid_size = "1.0")]
#[pyo3(text_signature = "(grid_size)")]
fn get_grid(&self, grid_size: f32, py: Python) -> PyResult<PyObject> {
let stars_path = self.check_stars()?;
grid_stats(&stars_path, grid_size)
.map(|ret| ret.to_object(py))
.map_err(PyErr::new::<PyRuntimeError, _>)
}
#[args(hops = "*")]
#[text_signature = "(sys_1, sys_2, ..., /)"]
fn resolve_systems(&self, hops: &PyTuple, py: Python) -> PyResult<PyObject> {
let mut sys_entries: Vec<SysEntry> = Vec::new();
for hop in hops {
if let Ok(id) = hop.extract() {
sys_entries.push(SysEntry::ID(id));
} else {
sys_entries.push(SysEntry::parse(hop.extract()?));
}
}
println!("Resolving systems...");
let stars_path = PathBuf::from(self.stars_path.clone());
let ids: Vec<u32> = match resolve(&sys_entries, &stars_path) {
Ok(ids) => ids,
#[pyo3(text_signature = "(sys_1, sys_2, ..., /)")]
fn resolve(&self, hops: Vec<SysEntry>, py: Python) -> PyResult<PyObject> {
info!("Resolving systems...");
let stars_path = self.check_stars()?;
let systems: Vec<System> = match resolve(&hops, &stars_path, false) {
Ok(systems) => systems.into_iter().map(|sys| sys.into_system()).collect(),
Err(err_msg) => {
return Err(PyErr::new::<ValueError, _>(err_msg));
return Err(EdLrrError::ResolveError(err_msg).into());
}
};
let ret: Vec<(_, u32)> = hops.into_iter().zip(ids.into_iter()).collect();
let ret: Vec<(_, System)> = hops
.into_iter()
.zip(systems.iter())
.map(|(id, sys)| (id, sys.clone()))
.collect();
Ok(PyDict::from_sequence(py, ret.to_object(py))?.to_object(py))
}
#[staticmethod]
fn preprocess_edsm() -> PyResult<()> {
todo!("Implement EDSM Preprocessor")
}
#[staticmethod]
fn preprocess_galaxy() -> PyResult<()> {
todo!("Implement galaxy.json Preprocessor")
fn str_tree_test(&self) -> common::EdLrrResult<()> {
use common::BKTree;
const CHUNK_SIZE: usize = 4_000_000;
let path = self.check_stars()?;
let reader: csv::Reader<File> = csv::ReaderBuilder::new()
.has_headers(false)
.from_path(path)
.map_err(EdLrrError::from)?;
let mut data: Vec<String> = Vec::with_capacity(CHUNK_SIZE);
let t_start = Instant::now();
let mut base_id = 0;
let mut wr = BufWriter::new(File::create("test.bktree")?);
for sys in reader.into_deserialize::<System>() {
let sys = sys?;
data.push(sys.name);
if data.len() > CHUNK_SIZE {
let tree = BKTree::new(&data, base_id);
tree.dump(&mut wr)?;
base_id = sys.id;
data.clear(); // start the next chunk; without this every following row rebuilds the tree
}
}
if !data.is_empty() {
let tree = BKTree::new(&data, base_id);
tree.dump(&mut wr)?;
}
wr.flush()?;
println!("Took: {:?}", t_start.elapsed());
Ok(())
}
}
@ -153,52 +388,91 @@ impl PyObjectProtocol for PyRouter {
}
}
fn resolve(entries: &[SysEntry], path: &PathBuf) -> Result<Vec<u32>, String> {
enum ResolveResult {
System(System),
ID(u32),
}
impl ResolveResult {
fn into_id(self) -> u32 {
match self {
Self::System(sys) => sys.id,
Self::ID(id) => id,
}
}
fn into_system(self) -> System {
if let Self::System(sys) = self {
return sys;
}
panic!("Tried to unwrap ID into System");
}
}
fn resolve(entries: &[SysEntry], path: &Path, id_only: bool) -> Result<Vec<ResolveResult>, String> {
let mut names: Vec<String> = Vec::new();
let mut ids: Vec<u32> = Vec::new();
let mut ret: Vec<u32> = Vec::new();
let mut needs_rtree = false;
for ent in entries {
match ent {
SysEntry::Name(name) => names.push(name.to_owned()),
SysEntry::ID(id) => ids.push(*id),
SysEntry::Pos(_) => {
needs_rtree = true;
}
_ => (),
}
}
if !path.exists() {
return Err(format!(
"Source file \"{:?}\" does not exist!",
path.display()
));
return Err(format!("Source file {:?} does not exist!", path.display()));
}
let name_ids = find_matches(path, names, false)?;
let name_ids = if !names.is_empty() {
mmap_csv::mmap_csv(path, names)?
} else {
HashMap::new()
};
let tmp_r = needs_rtree
.then(|| {
let mut r = Router::new();
r.load(path).map(|_| r)
})
.transpose()?;
for ent in entries {
match ent {
SysEntry::Name(name) => {
let ent_res = name_ids
.get(&name.to_owned())
.get(name)
.ok_or(format!("System {} not found", name))?;
let sys = ent_res
.1
.as_ref()
.ok_or(format!("System {} not found", name))?;
if ent_res.0 < 0.75 {
println!(
"WARNING: {} match to {} with low confidence ({:.2}%)",
name,
sys.system,
ent_res.0 * 100.0
);
}
ret.push(sys.id);
ret.push(*sys);
}
SysEntry::ID(id) => ret.push(*id),
SysEntry::Pos((x, y, z)) => ret.push(
tmp_r
.as_ref()
.unwrap()
.closest(&[*x, *y, *z])
.ok_or("No systems loaded!")?
.id,
),
}
}
Ok(ret)
if id_only {
return Ok(ret.iter().map(|id| ResolveResult::ID(*id)).collect());
} else {
let mut lc = route::LineCache::create(path)?;
let mut systems = vec![];
for id in ret {
let sys = ResolveResult::System(lc.get(id)?.unwrap());
systems.push(sys)
}
return Ok(systems);
}
}
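A minimal usage sketch for resolve (the stars.csv path and the entries are illustrative, and the call is assumed to run inside a function that can propagate the String error): names are fuzzy-matched against the CSV, raw IDs pass through unchanged, and positions are answered by the closest system in the R*-tree.
use std::path::Path;

let entries = vec![
    SysEntry::Name("Sol".to_string()),
    SysEntry::ID(42),
    SysEntry::Pos((25.2, -20.9, 25890.9)),
];
let ids: Vec<u32> = resolve(&entries, Path::new("stars.csv"), true)?
    .into_iter()
    .map(|res| res.into_id())
    .collect();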
#[pyclass(dict)]
#[derive(Debug)]
#[derive(Debug, Clone)]
struct PyShip {
ship: Ship,
}
@ -219,8 +493,8 @@ impl PyShip {
#[staticmethod]
fn from_loadout(py: Python, loadout: &str) -> PyResult<PyObject> {
match Ship::new_from_json(loadout) {
Ok(ship) => Ok((PyShip { ship }).into_py(py)),
Err(err_msg) => Err(PyErr::new::<ValueError, _>(err_msg)),
Ok(ship) => Ok((PyShip { ship: ship.1 }).into_py(py)),
Err(err_msg) => Err(PyErr::new::<PyValueError, _>(err_msg)),
}
}
#[staticmethod]
@ -228,15 +502,15 @@ impl PyShip {
let mut ship = match Ship::new_from_journal() {
Ok(ship) => ship,
Err(err_msg) => {
return Err(PyErr::new::<ValueError, _>(err_msg));
return Err(PyErr::new::<PyValueError, _>(err_msg));
}
};
let ships: Vec<(PyObject, PyObject)> = ship
.drain()
.map(|(k, v)| {
let k_py = k.to_object(py);
let v_py = (PyShip { ship: v }).into_py(py);
(k_py, v_py)
.map(|(ship_name, ship)| {
let ship_name_py = ship_name.to_object(py);
let ship_py = (PyShip { ship }).into_py(py);
(ship_name_py, ship_py)
})
.collect();
Ok(PyDict::from_sequence(py, ships.to_object(py))?.to_object(py))
@ -246,239 +520,141 @@ impl PyShip {
self.ship.to_object(py)
}
#[text_signature = "(dist, /)"]
fn fuel_cost(&self, _py: Python, dist: f32) -> PyResult<f32> {
Ok(self.ship.fuel_cost(dist))
#[pyo3(text_signature = "(dist, /)")]
fn fuel_cost(&self, _py: Python, dist: f32) -> f32 {
self.ship.fuel_cost(dist)
}
#[text_signature = "(/)"]
fn range(&self, _py: Python) -> PyResult<f32> {
Ok(self.ship.range())
#[getter]
fn range(&self, _py: Python) -> f32 {
self.ship.range()
}
#[text_signature = "(/)"]
fn max_range(&self, _py: Python) -> PyResult<f32> {
Ok(self.ship.max_range())
#[getter]
fn max_range(&self, _py: Python) -> f32 {
self.ship.max_range()
}
#[text_signature = "(dist, /)"]
fn make_jump(&mut self, dist: f32, _py: Python) -> PyResult<Option<f32>> {
Ok(self.ship.make_jump(dist))
#[pyo3(text_signature = "(dist, /)")]
fn make_jump(&mut self, dist: f32, _py: Python) -> Option<f32> {
self.ship.make_jump(dist)
}
#[text_signature = "(dist, /)"]
fn can_jump(&self, dist: f32, _py: Python) -> PyResult<bool> {
Ok(self.ship.can_jump(dist))
#[pyo3(text_signature = "(dist, /)")]
fn can_jump(&self, dist: f32, _py: Python) -> bool {
self.ship.can_jump(dist)
}
#[args(fuel_amount = "None")]
#[text_signature = "(fuel_amount, /)"]
fn refuel(&mut self, fuel_amount: Option<f32>, _py: Python) -> PyResult<()> {
#[pyo3(text_signature = "(fuel_amount, /)")]
fn refuel(&mut self, fuel_amount: Option<f32>, _py: Python) {
if let Some(fuel) = fuel_amount {
self.ship.fuel_mass = (self.ship.fuel_mass + fuel).min(self.ship.fuel_capacity)
} else {
self.ship.fuel_mass = self.ship.fuel_capacity;
}
Ok(())
}
#[text_signature = "(factor, /)"]
fn boost(&mut self, factor: f32, _py: Python) -> PyResult<()> {
#[pyo3(text_signature = "(factor, /)")]
fn boost(&mut self, factor: f32, _py: Python) {
self.ship.boost(factor);
Ok(())
}
}
impl PyShip {
fn get_ship(&self) -> Ship {
self.ship.clone()
}
}
#[pyfunction]
fn preprocess_edsm(
_bodies_path: &str,
_systems_path: &str,
_out_path: &str,
_py: Python,
) -> PyResult<()> {
Err(pyo3::exceptions::PyNotImplementedError::new_err(
"please use Spansh's Galaxy dump and preprocess_galaxy()",
))
}
fn to_py_value(value: eval::Value, py: Python) -> PyResult<PyObject> {
type Value = eval::Value;
match value {
Value::String(s) => Ok(s.to_object(py)),
Value::Number(n) => {
if let Some(n) = n.as_u64() {
return Ok(n.to_object(py));
}
if let Some(n) = n.as_i64() {
return Ok(n.to_object(py));
}
return Ok(n.as_f64().unwrap().to_object(py));
}
Value::Bool(b) => Ok(b.to_object(py)),
Value::Array(mut t) => {
let mut res: Vec<PyObject> = vec![];
for v in t.drain(..) {
res.push(to_py_value(v, py)?);
}
Ok(PyTuple::new(py, &res).to_object(py))
}
Value::Object(o) => {
let res = PyDict::new(py);
for (k, v) in o.iter() {
res.set_item(k, to_py_value(v.clone(), py)?)?;
}
Ok(res.to_object(py))
}
Value::Null => Ok(py.None()),
}
}
fn to_py(res: Result<eval::Value, eval::Error>, py: Python) -> PyResult<PyObject> {
res.map_err(|e| PyErr::from(EdLrrError::EvalError(e)))
.and_then(|r| to_py_value(r, py))
}
#[pyfunction]
#[pyo3(text_signature = "(expr)")]
fn expr_test(expr: &str, py: Python) -> PyResult<PyObject> {
use eval::{to_value, Expr, Value};
let mut res = Expr::new(expr)
.compile()
.map_err(|e| PyErr::from(EdLrrError::EvalError(e)))?;
let mut hm: HashMap<&str, Value> = HashMap::new();
hm.insert("foo", to_value(42));
hm.insert("bar", to_value((-1, -2, -3)));
res = res.value("x", vec!["Hello", "world", "!"]);
res = res.value("y", 42);
res = res.value("p", (2.17, 5.14, 1.62));
res = res.value("hw", "Hello World!");
res = res.value("hm", hm);
to_py(res.exec(), py)
}
#[pyfunction]
#[pyo3(text_signature = "(path, out_path, /)")]
fn preprocess_galaxy(path: &str, out_path: &str) -> PyResult<()> {
use common::build_index;
use galaxy::process_galaxy_dump;
let path = PathBuf::from(path);
let out_path = PathBuf::from(out_path);
process_galaxy_dump(&path, &out_path).unwrap();
build_index(&out_path)?;
Ok(())
}
#[pymodule]
pub fn _ed_lrr(_py: Python, m: &PyModule) -> PyResult<()> {
better_panic::install();
pyo3_log::init();
profiling::init();
m.add_class::<PyRouter>()?;
m.add_class::<PyShip>()?;
/*
#[pyfn(m, "get_ships_from_journal")]
fn get_ships_from_journal(py: Python) -> PyResult<PyObject> {
let ship = match Ship::new_from_journal() {
Ok(ship) => ship,
Err(err_msg) => {
return Err(PyErr::new::<ValueError, _>(err_msg));
}
};
let ships: Vec<(_,_)> = ship.iter().map(|(k,v)| (k.to_object(py),v.to_object(py))).collect();
Ok(PyDict::from_sequence(py, ships.to_object(py))?.to_object(py))
}
#[pyfn(m, "get_ships_from_loadout")]
fn get_ship_from_loadout(py: Python, loadout: &str) -> PyResult<PyObject> {
let ship = match Ship::new_from_json(loadout) {
Ok(ship) => ship,
Err(err_msg) => {
return Err(PyErr::new::<ValueError, _>(err_msg));
}
};
Ok(ship.to_object(py))
}
*/
m.add_wrapped(pyo3::wrap_pyfunction!(preprocess_galaxy))?;
m.add_wrapped(pyo3::wrap_pyfunction!(preprocess_edsm))?;
m.add_wrapped(pyo3::wrap_pyfunction!(expr_test))?;
Ok(())
}
/*
/// Preprocess bodies.json and systemsWithCoordinates.json into stars.csv
#[pyfn(m, "preprocess")]
#[text_signature = "(infile_systems, infile_bodies, outfile, callback, /)"]
fn ed_lrr_preprocess(
py: Python<'static>,
infile_systems: String,
infile_bodies: String,
outfile: String,
callback: PyObject,
) -> PyResult<PyObject> {
use preprocess::*;
let state = PyDict::new(py);
let state_dict = PyDict::new(py);
callback.call(py, (state_dict,), None).unwrap();
let callback_wrapped = move |state: &PreprocessState| {
// println!("SEND: {:?}",state);
state_dict.set_item("file", state.file.clone())?;
state_dict.set_item("total", state.total)?;
state_dict.set_item("count", state.count)?;
state_dict.set_item("done", state.done)?;
state_dict.set_item("message", state.message.clone())?;
callback.call(py, (state_dict,), None)
};
preprocess_files(
&PathBuf::from(infile_bodies),
&PathBuf::from(infile_systems),
&PathBuf::from(outfile),
&callback_wrapped,
)
.unwrap();
Ok(state.to_object(py))
}
/// Find system by name
#[pyfn(m, "find_sys")]
#[text_signature = "(sys_names, sys_list_path, /)"]
fn find_sys(py: Python, sys_names: Vec<String>, sys_list: String) -> PyResult<PyObject> {
let path = PathBuf::from(sys_list);
match find_matches(&path, sys_names, false) {
Ok(vals) => {
let ret = PyDict::new(py);
for (key, (diff, sys)) in vals {
let ret_dict = PyDict::new(py);
if let Some(val) = sys {
let pos = PyList::new(py, val.pos.iter());
ret_dict.set_item("star_type", val.star_type.clone())?;
ret_dict.set_item("system", val.system.clone())?;
ret_dict.set_item("body", val.body.clone())?;
ret_dict.set_item("distance", val.distance)?;
ret_dict.set_item("pos", pos)?;
ret_dict.set_item("id", val.id)?;
ret.set_item(key, (diff, ret_dict).to_object(py))?;
}
}
Ok(ret.to_object(py))
}
Err(e) => Err(PyErr::new::<ValueError, _>(e)),
}
}
/// Compute a Route using the supplied parameters
#[pyfn(m, "route")]
#[text_signature = "(hops, range, mode, primary, permute, keep_first, keep_last, greedyness, precomp, path, num_workers, callback, /)"]
#[allow(clippy::too_many_arguments)]
fn py_route(
py: Python<'static>,
hops: Vec<&str>,
range: f32,
mode: String,
primary: bool,
permute: bool,
keep_first: bool,
keep_last: bool,
greedyness: Option<f32>,
precomp: Option<String>,
path: String,
num_workers: Option<usize>,
callback: PyObject,
) -> PyResult<PyObject> {
use route::*;
let num_workers = num_workers.unwrap_or(1);
let mode = match Mode::parse(&mode) {
Ok(val) => val,
Err(e) => {
return Err(PyErr::new::<ValueError, _>(e));
}
};
let state_dict = PyDict::new(py);
{
let cb_res = callback.call(py, (state_dict,), None);
if cb_res.is_err() {
println!("Error: {:?}", cb_res);
}
}
let callback_wrapped = move |state: &SearchState| {
state_dict.set_item("mode", state.mode.clone())?;
state_dict.set_item("system", state.system.clone())?;
state_dict.set_item("body", state.body.clone())?;
state_dict.set_item("depth", state.depth)?;
state_dict.set_item("queue_size", state.queue_size)?;
state_dict.set_item("d_rem", state.d_rem)?;
state_dict.set_item("d_total", state.d_total)?;
state_dict.set_item("prc_done", state.prc_done)?;
state_dict.set_item("n_seen", state.n_seen)?;
state_dict.set_item("prc_seen", state.prc_seen)?;
state_dict.set_item("from", state.from.clone())?;
state_dict.set_item("to", state.to.clone())?;
let cb_res = callback.call(py, (state_dict,), None);
if cb_res.is_err() {
println!("Error: {:?}", cb_res);
}
cb_res
};
let hops: Vec<SysEntry> = (hops.iter().map(|v| SysEntry::from_str(&v)).collect::<Result<Vec<SysEntry>,_>>())?;
println!("Resolving systems...");
let hops: Vec<u32> = match resolve(&hops, &PathBuf::from(&path)) {
Ok(ids) => ids,
Err(err_msg) => {
return Err(PyErr::new::<ValueError, _>(err_msg));
}
};
let opts = RouteOpts {
systems: hops,
range: Some(range),
file_path: PathBuf::from(path),
precomp_file: precomp.map(PathBuf::from),
callback: Box::new(callback_wrapped),
mode,
factor: greedyness,
precompute: false,
permute,
keep_first,
keep_last,
primary,
workers: num_workers,
};
match route(opts) {
Ok(Some(route)) => {
let hops = route.iter().map(|hop| {
let pos = PyList::new(py, hop.pos.iter());
let elem = PyDict::new(py);
elem.set_item("star_type", hop.star_type.clone()).unwrap();
elem.set_item("system", hop.system.clone()).unwrap();
elem.set_item("body", hop.body.clone()).unwrap();
elem.set_item("distance", hop.distance).unwrap();
elem.set_item("pos", pos).unwrap();
elem
});
let lst = PyList::new(py, hops);
Ok(lst.to_object(py))
}
Ok(None) => Ok(py.None()),
Err(e) => Err(PyErr::new::<ValueError, _>(e)),
}
}
*/

rust/src/mmap_csv.rs (new file)

@ -0,0 +1,69 @@
use crate::common::{EdLrrError, EdLrrResult, System};
use crate::info;
use csv_core::{ReadFieldResult, Reader};
use memmap::Mmap;
use std::collections::HashMap;
use std::fs::File;
use std::path::Path;
pub fn mmap_csv(path: &Path, query: Vec<String>) -> Result<HashMap<String, Option<u32>>, String> {
let file = File::open(path).map_err(|e| e.to_string())?;
let mm = unsafe { Mmap::map(&file) }.map_err(|e| e.to_string())?;
let mut best = query
.iter()
.map(|s| (s, (s.as_bytes(), usize::MAX, u32::MAX)))
.collect::<Vec<(&String, (_, usize, u32))>>();
let t_start = std::time::Instant::now();
let dist = eddie::slice::DamerauLevenshtein::new();
let mut row = 0;
{
let mut data = &mm[..];
let mut rdr = Reader::new();
let mut field = [0; 1024];
let mut fieldidx = 0;
loop {
let (result, nread, nwrite) = rdr.read_field(data, &mut field);
data = &data[nread..];
let field = &field[..nwrite];
match result {
ReadFieldResult::InputEmpty => {}
ReadFieldResult::OutputFull => {
return Err("Encountered field larget than 1024 bytes!".to_string());
}
ReadFieldResult::Field { record_end } => {
if fieldidx == 1 {
for (_, (name_b, best_dist, id)) in best.iter_mut() {
let d = dist.distance(name_b, field);
if d < *best_dist {
*best_dist = d;
*id = row;
}
}
}
if record_end {
fieldidx = 0;
row += 1;
} else {
fieldidx += 1;
}
}
// This case happens when the CSV reader has successfully exhausted
// all input.
ReadFieldResult::End => {
break;
}
}
}
}
let search_result = best
.drain(..)
.map(|(query_name, (_, _, idx))| (query_name.clone(), Some(idx)))
.collect::<HashMap<String, Option<u32>>>();
let rate = (row as f64) / t_start.elapsed().as_secs_f64();
info!(
"Took: {:.2?}, {:.2} systems/second",
t_start.elapsed(),
rate
);
Ok(search_result)
}
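A short usage sketch for mmap_csv (path and names are illustrative): each query string is mapped to the row index of its best Damerau-Levenshtein match in the name column.
use std::path::Path;

let query = vec!["Sol".to_string(), "Colonia".to_string()];
let matches = mmap_csv(Path::new("stars.csv"), query).expect("fuzzy search failed");
for (name, row) in &matches {
    println!("{} -> row {:?}", name, row);
}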

rust/src/profiling.rs (new file)

@ -0,0 +1,15 @@
#![cfg(feature = "profiling")]
use tracing::subscriber::set_global_default;
pub use tracing::{debug, error, info, span, trace, warn, Level};
pub use tracing::{debug_span, error_span, info_span, trace_span, warn_span};
use tracing_chrome::ChromeLayerBuilder;
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::Registry;
use tracing_tracy::TracyLayer;
pub use tracy_client::ProfiledAllocator;
pub fn init() {
let (chrome_layer, _guard) = ChromeLayerBuilder::new().build();
let subscriber = Registry::default().with(chrome_layer);
set_global_default(subscriber).expect("setting default subscriber failed");
}

File diff suppressed because it is too large.

@ -0,0 +1,56 @@
use crate::common::TreeNode;
use crate::route::{Router, SearchState};
use fnv::FnvHashMap;
trait SearchAlgoImpl<State = (), Weight: Ord = ()> {
fn get_weight(&mut self, systems: &TreeNode, router: &Router) -> Option<Weight>;
fn get_neighbors(
&mut self,
system: &TreeNode,
router: &Router,
range: f32,
) -> Vec<(Weight, TreeNode)> {
let mut ret = vec![];
for nb in router.neighbours(system, range) {
if let Some(w) = self.get_weight(nb, router) {
ret.push((w, *nb));
}
}
return ret;
}
}
struct SearchAlgo<'a> {
algo: Box<dyn SearchAlgoImpl>,
prev: FnvHashMap<u32, u32>,
state: Option<SearchState>,
router: &'a Router,
}
struct BFS(usize);
impl SearchAlgoImpl for BFS {
fn get_weight(&mut self, _system: &TreeNode, _router: &Router) -> Option<()> {
return Some(());
}
}
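A second, purely hypothetical implementation to illustrate the trait (it is not part of this commit and assumes smaller weights are preferred): a greedy weight ranking neighbours by squared distance to a fixed goal. f32 is not Ord, so the non-negative squared distance is converted to its IEEE-754 bit pattern, which preserves ordering for non-negative finite values.
struct GreedyTowards {
    goal: [f32; 3],
}

impl SearchAlgoImpl<(), u32> for GreedyTowards {
    fn get_weight(&mut self, system: &TreeNode, _router: &Router) -> Option<u32> {
        // squared Euclidean distance between the candidate system and the goal
        let d2: f32 = system
            .pos
            .iter()
            .zip(self.goal.iter())
            .map(|(a, b)| (a - b) * (a - b))
            .sum();
        Some(d2.to_bits())
    }
}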
impl<'a> SearchAlgo<'a> {
fn new(router: &'a Router, algo: Box<dyn SearchAlgoImpl>) -> Self {
Self {
algo,
prev: FnvHashMap::default(),
state: None,
router,
}
}
fn test(&mut self) {
// self.algo.get_neighbors
}
}
/*
a = 1 - acos(dot(u/Length(u),v/Length(v)))/PI
*/
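One way to read the formula in the comment above, sketched with ndot_iter from tests/dot_impls.rs (an assumption; any normalized dot product works): the score is 1.0 when the two vectors point the same way, 0.5 when they are perpendicular, and 0.0 when they point in opposite directions.
fn angle_weight(u: &[f32; 3], v: &[f32; 3]) -> f32 {
    // clamp guards acos against rounding drift just outside [-1, 1]
    let cos_theta = ndot_iter(u, v).clamp(-1.0, 1.0);
    1.0 - cos_theta.acos() / std::f32::consts::PI
}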


@ -1,5 +1,7 @@
//! Ship fuel consumption and jump range calculations
use crate::common::get_fsd_booster_info;
use crate::journal::*;
use eyre::Result;
use pyo3::conversion::ToPyObject;
use pyo3::prelude::*;
use pyo3::types::PyDict;
@ -10,21 +12,25 @@ use std::fs::File;
use std::io::{BufRead, BufReader};
use std::path::PathBuf;
/// Frame Shift Drive information
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FSD {
/// Rating
pub rating_val: f32,
/// Class
pub class_val: f32,
/// Optimized Mass
pub opt_mass: f32,
/// Max fuel per jump
pub max_fuel: f32,
/// Boost factor
pub boost: f32,
/// Guardian booster bonus range
pub guardian_booster: f32,
}
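For orientation, an FSD value might be filled in like this (the numbers are illustrative, roughly a class 5 rating A drive, and are not taken from this commit):
let fsd = FSD {
    rating_val: 12.0,      // linear constant for the drive rating
    class_val: 2.45,       // power constant for the drive class
    opt_mass: 1050.0,      // optimized mass in tons
    max_fuel: 5.0,         // maximum fuel per jump in tons
    boost: 1.0,            // no range boost active
    guardian_booster: 0.0, // no guardian FSD booster fitted
};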
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Ship {
pub name: String,
pub ident: String,
pub ship_type: String,
pub base_mass: f32,
pub fuel_mass: f32,
pub fuel_capacity: f32,
@ -33,9 +39,6 @@ pub struct Ship {
impl Ship {
pub fn new(
name: String,
ident: String,
ship_type: String,
base_mass: f32,
fuel_mass: f32,
fuel_capacity: f32,
@ -57,15 +60,12 @@ impl Ship {
if fsd_type.1 < 2 || fsd_type.1 > 8 {
return Err(format!("Invalid class: {}", fsd_type.1));
};
if guardian_booster!=0 {
return Err("Guardian booster not yet implemented!".to_owned())
if guardian_booster != 0 {
return Err("Guardian booster not yet implemented!".to_owned());
}
let ret = Self {
name,
ident,
ship_type,
fuel_capacity,
fuel_mass,
base_mass,
@ -81,8 +81,8 @@ impl Ship {
Ok(ret)
}
pub fn new_from_json(data: &str) -> Result<Self, String> {
match serde_json::from_str::<Event>(&data) {
pub fn new_from_json(data: &str) -> Result<(String, Ship), String> {
match serde_json::from_str::<Event>(data) {
Ok(Event {
event: EventData::Unknown,
}) => {
@ -101,7 +101,7 @@ impl Ship {
};
}
pub fn new_from_journal() -> Result<HashMap<String, Self>, String> {
pub fn new_from_journal() -> Result<HashMap<String, Ship>, String> {
let mut ret = HashMap::new();
let re = Regex::new(r"^Journal\.\d{12}\.\d{2}\.log$").unwrap();
let mut journals: Vec<PathBuf> = Vec::new();
@ -110,12 +110,10 @@ impl Ship {
userprofile.push("Frontier Developments");
userprofile.push("Elite Dangerous");
if let Ok(iter) = userprofile.read_dir() {
for entry in iter {
if let Ok(entry) = entry {
if re.is_match(&entry.file_name().to_string_lossy()) {
journals.push(entry.path());
};
}
for entry in iter.flatten() {
if re.is_match(&entry.file_name().to_string_lossy()) {
journals.push(entry.path());
};
}
}
journals.sort();
@ -133,16 +131,7 @@ impl Ship {
}) => {}
Ok(ev) => {
if let Some(loadout) = ev.get_loadout() {
let mut ship = loadout.try_into_ship()?;
if ship.name == "" {
ship.name = "<NO NAME>".to_owned();
}
let key = format!(
"[{}] {} ({})",
ship.ident,
ship.name,
ship.ship_type.to_ascii_lowercase()
);
let (key, ship) = loadout.try_into_ship()?;
ret.insert(key, ship);
}
}
@ -179,7 +168,7 @@ impl Ship {
Some(cost)
}
fn jump_range(&self, fuel: f32, booster: bool) -> f32 {
pub fn jump_range(&self, fuel: f32, booster: bool) -> f32 {
let mass = self.base_mass + fuel;
let mut fuel = self.fsd.max_fuel.min(fuel);
if booster {
@ -198,6 +187,10 @@ impl Ship {
return self.jump_range(self.fuel_mass, true);
}
pub fn full_range(&self) -> f32 {
return self.jump_range(self.fuel_capacity, true);
}
fn boost_fuel_mult(&self) -> f32 {
if self.fsd.guardian_booster == 0.0 {
return 1.0;
@ -208,6 +201,21 @@ impl Ship {
return ((base_range + self.fsd.guardian_booster) / base_range).powf(self.fsd.class_val);
}
pub fn fuel_cost_for_jump(&self, fuel_mass: f32, dist: f32, boost: f32) -> Option<(f32, f32)> {
if dist == 0.0 {
return Some((0.0, 0.0));
}
let mass = self.base_mass + fuel_mass;
let opt_mass = self.fsd.opt_mass * boost;
let base_cost = (dist * mass) / opt_mass;
let fuel_cost = (self.fsd.rating_val * 0.001 * base_cost.powf(self.fsd.class_val))
/ self.boost_fuel_mult();
if fuel_cost > self.fsd.max_fuel || fuel_cost > fuel_mass {
return None;
};
return Some((fuel_cost, fuel_mass - fuel_cost));
}
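A quick, illustrative check of fuel_cost_for_jump (the 20 ly distance is made up; boost = 1.0 means the optimized mass is left unscaled): it returns None when the jump would exceed the FSD's maximum fuel per jump or the fuel currently on board, otherwise the cost and the remaining fuel.
if let Some((cost, remaining)) = ship.fuel_cost_for_jump(ship.fuel_mass, 20.0, 1.0) {
    println!("20 ly costs {:.2} t of fuel, {:.2} t remain", cost, remaining);
} else {
    println!("jump not possible with the current fuel load");
}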
pub fn fuel_cost(&self, d: f32) -> f32 {
if d == 0.0 {
return 0.0;
@ -220,29 +228,6 @@ impl Ship {
}
}
/*
#[derive(Debug,Clone, Serialize, Deserialize, ToPyObject)]
pub struct FSD {
pub rating_val: f32,
pub class_val: f32,
pub opt_mass: f32,
pub max_fuel: f32,
pub boost: f32,
pub guardian_booster: f32,
}
#[derive(Debug,Clone, Serialize, Deserialize, ToPyObject)]
pub struct Ship {
pub name: String,
pub ident: String,
pub ship_type: String,
pub base_mass: f32,
pub fuel_mass: f32,
pub fuel_capacity: f32,
pub fsd: FSD,
}
*/
impl FSD {
pub fn to_object(&self, py: Python) -> PyResult<PyObject> {
let elem = PyDict::new(py);
@ -259,9 +244,6 @@ impl FSD {
impl Ship {
pub fn to_object(&self, py: Python) -> PyResult<PyObject> {
let elem = PyDict::new(py);
elem.set_item("name", self.name.clone())?;
elem.set_item("ident", self.ident.clone())?;
elem.set_item("ship_type", self.ship_type.clone())?;
elem.set_item("base_mass", self.base_mass)?;
elem.set_item("fuel_mass", self.fuel_mass)?;
elem.set_item("fuel_capacity", self.fuel_capacity)?;

rust/tests/dot_impls.rs (new file)

@ -0,0 +1,69 @@
#[inline(always)]
fn veclen(v: &[f32; 3]) -> f32 {
(v[0] * v[0] + v[1] * v[1] + v[2] * v[2]).sqrt()
}
#[inline(always)]
fn dist2(p1: &[f32; 3], p2: &[f32; 3]) -> f32 {
let dx = p1[0] - p2[0];
let dy = p1[1] - p2[1];
let dz = p1[2] - p2[2];
dx * dx + dy * dy + dz * dz
}
#[inline(always)]
fn dist(p1: &[f32; 3], p2: &[f32; 3]) -> f32 {
dist2(p1, p2).sqrt()
}
#[inline(always)]
fn distm(p1: &[f32; 3], p2: &[f32; 3]) -> f32 {
let dx = (p1[0] - p2[0]).abs();
let dy = (p1[1] - p2[1]).abs();
let dz = (p1[2] - p2[2]).abs();
dx + dy + dz
}
/// Dot product (cosine of angle) between two 3D vectors
#[inline(always)]
pub fn ndot_vec_dist(u: &[f32; 3], v: &[f32; 3]) -> f32 {
let z: [f32; 3] = [0.0; 3];
let lm = dist(u, &z) * dist(v, &z);
((u[0] * v[0]) + (u[1] * v[1]) + (u[2] * v[2])) / lm
}
/// Dot product (cosine of angle) between two 3D vectors
#[inline(always)]
pub fn ndot_vec_len(u: &[f32; 3], v: &[f32; 3]) -> f32 {
let lm = veclen(u) * veclen(v);
((u[0] * v[0]) + (u[1] * v[1]) + (u[2] * v[2])) / lm
}
#[inline(always)]
pub fn ndot_iter(u: &[f32; 3], v: &[f32; 3]) -> f32 {
let l_1: f32 = u.iter().map(|e| e * e).sum();
let l_2: f32 = v.iter().map(|e| e * e).sum();
let lm = (l_1 * l_2).sqrt();
let mut ret = 0.0;
for (a, b) in u.iter().zip(v.iter()) {
ret += a * b;
}
ret / lm
}
#[cfg(test)]
mod dot_impl_tests {
#[test]
fn test_dot_impls() {
use super::*;
let v1 = [1.0, 2.0, 3.0];
let v2 = [4.0, 5.0, 6.0];
let d1 = ndot_vec_dist(&v1, &v2);
let d2 = ndot_vec_len(&v1, &v2);
let d3 = ndot_iter(&v1, &v2);
assert!((d1 - d2).abs() < 0.01);
assert!((d2 - d3).abs() < 0.01);
assert!((d3 - d1).abs() < 0.01);
}
}