docs: Add skeleton for documentation

commit dbc6f35ca2 (parent 7438043f26)

6 changed files with 329 additions and 0 deletions
.gitignore (vendored): 1 addition
@@ -12,3 +12,4 @@ plot.py
 __pycache__
 *.egg-info
 build
+*.pdf
doc/.vscode/settings.json (vendored, new file): 11 additions
@@ -0,0 +1,11 @@
+{
+    "spellright.language": [
+        "de",
+        "en"
+    ],
+    "spellright.documentTypes": [
+        "markdown",
+        "latex",
+        "plaintext"
+    ]
+}
doc/Makefile (new file): 48 additions
@@ -0,0 +1,48 @@
+MD = $(wildcard src/*.md)
+DOTS = $(wildcard src/*.dot)
+ASYS = $(wildcard src/*.asy)
+PYS = $(wildcard src/img_*.py)
+PDFS = $(MD:src/%.md=out/%.pdf)
+
+IMG_PDFS = $(ASYS:src/%.asy=img/%.pdf) $(PYS:src/img_%.py=img/%.pdf) $(DOTS:src/%.dot=img/%.pdf)
+
+IMGS = $(IMG_PDFS)
+
+TEMPLATE = eisvogel
+PDF_ENGINE = xelatex
+PANDOC = pandoc
+PANDOC_OPTIONS = -F panflute -F pandoc-citeproc --pdf-engine=$(PDF_ENGINE) --template $(TEMPLATE) -N --standalone --listings
+
+GRAPHVIZ = dot
+GRAPHVIZ_OPTIONS = -Tpdf
+
+ASY = asy
+ASY_OPTIONS = -noV -f pdf
+
+PYTHON = python
+PYTHON_OPTIONS =
+
+mkfile_path := $(abspath $(lastword $(MAKEFILE_LIST)))
+current_dir := $(notdir $(patsubst %/,%,$(dir $(mkfile_path))))
+
+.PHONY: clean all default
+all: $(PDFS)
+default: all
+
+out/%.pdf: src/%.md $(IMGS) Makefile
+	$(PANDOC) $(PANDOC_OPTIONS) -o $@ $<
+
+img/%.pdf: src/%.dot
+	$(GRAPHVIZ) $(GRAPHVIZ_OPTIONS) -o $@ $<
+
+img/%.pdf: src/img_%.py
+	$(PYTHON) $(PYTHON_OPTIONS) $< $@
+
+img/%.pdf: src/%.asy
+	$(ASY) $(ASY_OPTIONS) -o $@ $<
+
+watch:
+	watchexec -w src -w data -w filters -w Makefile make all
+
+clean:
+	-rm $(PDFS) $(IMGS)
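For reference, the img/%.pdf pattern rule above invokes each image script as `$(PYTHON) src/img_<name>.py img/<name>.pdf`, so a script is expected to write its figure to the path passed as its first command-line argument (doc/src/img_out.py below follows this convention). A minimal sketch of such a script, assuming matplotlib as the plotting library and using purely placeholder data:

    # Hypothetical src/img_example.py -- a sketch, not part of this commit.
    import sys

    import matplotlib.pyplot as plt

    fig, ax = plt.subplots()
    ax.plot([0, 1], [0, 1], label="placeholder")  # stand-in data for illustration
    ax.legend()
    fig.savefig(sys.argv[1])  # the Makefile passes img/<name>.pdf as the first argument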
							
								
								
									
doc/filters/multifilter.py (new file): 147 additions
@@ -0,0 +1,147 @@
+from panflute import *
+import tempfile
+import sys
+from jinja2 import Template, Environment, PackageLoader, select_autoescape
+import contextlib
+import io
+import hashlib
+from dateutil.parser import parse as dateparse
+from functools import partial
+import subprocess as SP
+import panflute as pf
+import os
+import csv
+import datetime
+import re
+
+
+def remove_pound(elem, doc):
+    if type(elem) == Str:
+        return Str(elem.text.lstrip("#"))
+
+
+def fix_color(elem, doc):
+    if type(elem) == MetaMap:
+        for k in elem.content:
+            if k.endswith("-color"):
+                elem[k] = elem[k].walk(remove_pound)
+
+
+def update_date(elem, doc):
+    if type(elem) == MetaMap:
+        datefmt = doc.get_metadata('datefmt', "%Y-%m-%d")
+        today = datetime.date.today().strftime(datefmt)
+        date = dateparse(doc.get_metadata('date', today)).date()
+        elem['date'] = MetaInlines(Str(date.strftime(datefmt)))
+        return elem
+
+
+def csv_table(elem, doc):
+    if type(elem) == Para and len(elem.content) == 1 and type(elem.content[0]) == Image:
+        elem = elem.content[0]
+        ext = os.path.splitext(elem.url)[1][1:]
+        if ext == "csv":
+            caption = elem.content
+            has_header = elem.attributes.get(
+                'has-header', "false").lower() == "true"
+            with open(elem.url) as f:
+                reader = csv.reader(f)
+                body = []
+                for row in reader:
+                    cells = [TableCell(Plain(Str(x))) for x in row]
+                    body.append(TableRow(*cells))
+            header = body.pop(0) if has_header else None
+            ret = Table(*body, header=header, caption=caption)
+            return ret
+
+
+def code_refs(elem, doc):
+    if type(elem) == Cite:
+        label = elem.content[0]
+        if type(label) == Str:
+            label = label.text
+            filename = re.findall(r"^\[@lst:(.*)\]$", label) or [None]
+            if filename[0] in doc.inc_files:
+                return [RawInline("\\hyperref[{}]{{{}}}".format(filename[0], filename[0]), format="tex")]
+
+
+def include_code(elem, doc):
+    if type(elem) == CodeBlock:
+        if "include" in elem.attributes:
+            filepath = elem.attributes.pop("include")
+            filename = os.path.split(filepath)[-1]
+            try:
+                elem.text += \
+                    open(filepath, encoding="utf-8").read()
+                elem.attributes['caption'] = filename
+                doc.inc_files.append(filename)
+            except Exception as e:
+                elem.text += "Error: {}".format(e)
+            return [RawBlock("\\label{{{}}}".format(filename), format="tex"), elem]
+
+
+def py_eval(options, data, element, doc):
+    out_buffer = io.StringIO()
+    with contextlib.redirect_stdout(out_buffer):
+        exec(data, doc.pyenv)
+    out_buffer.seek(0)
+    return convert_text(out_buffer.read())
+
+
+def jinja_py_filt(doc, file):
+    env = {}
+    code = open(file, encoding="utf-8").read()
+    exec(code, env)
+    return env['main'](doc)
+
+
+def prepare(doc):
+    doc.inc_files = []
+    doc.env = Environment()
+    doc.pyenv = {}
+    filters = {'py': partial(jinja_py_filt, doc)}
+    doc.env.filters.update(filters)
+
+
+def process_templates(elem, doc):
+    if type(elem) == CodeBlock:
+        if elem.classes == ["@"]:
+            args = {'meta': doc.get_metadata()}
+            return convert_text(doc.env.from_string(elem.text).render(args))
+
+
+def yaml_filt(elem, doc):
+    tags = {
+        'eval': py_eval,
+    }
+    return yaml_filter(elem, doc, tags=tags, strict_yaml=True)
+
+
+def checkboxes(elem, doc):
+    if type(elem) in [Para, Plain]:
+        val = re.findall(r"^\[([xX]|\ )\] (.*)$", stringify(elem))
+        if val:
+            val = val[0][0].lower() == "x"
+        else:
+            return elem
+        marker = {
+            True: RawInline("$\\boxtimes$", format="latex"),
+            False: RawInline("$\\square$", format="latex"),
+        }[val]
+        cont = []
+        if val:
+            cont += elem.content[2:]
+        else:
+            cont += elem.content[4:]
+        return Plain(marker, Space, *cont)
+    return elem
+
+
+def main(doc=None):
+    f = [process_templates, update_date, csv_table, include_code,
+         fix_color, code_refs, yaml_filt, checkboxes]
+    return run_filters(f, prepare=prepare, doc=doc)
+
+
+if __name__ == "__main__":
+    main()
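To see what these filters do outside of the full pandoc run, the sketch below applies the checkboxes filter to a small Markdown snippet through panflute's Python API. This is an assumed usage example rather than part of the commit; it presumes pandoc and panflute are installed and that multifilter.py is importable (for example, when run from doc/filters/).

    # Hypothetical demo script, not included in this commit.
    import panflute as pf

    from multifilter import checkboxes

    # Parse two checkbox-style paragraphs into a panflute Doc.
    doc = pf.convert_text("[x] write docs\n\n[ ] write code", standalone=True)

    # Apply the checkbox filter: "[x]"/"[ ]" prefixes become LaTeX box symbols.
    doc = pf.run_filter(checkboxes, doc=doc)

    # Render the filtered document as LaTeX to inspect the result.
    print(pf.convert_text(doc, input_format="panflute", output_format="latex"))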
							
								
								
									
doc/src/ed-lrr.md (new file): 58 additions
@@ -0,0 +1,58 @@
+---
+# Metadata
+title: ED_LRR
+author:
+- Daniel Seiller <earthnuker@gmail.com>
+subtitle: 'Elite Dangerous: Long-Range Router'
+
+# Formatting
+toc: true
+lang: en
+colorlinks: true
+papersize: a4
+numbersections: true
+
+# Panflute options
+panflute-filters: [multifilter]
+panflute-path: 'filters'
+
+# Template options
+titlepage: true
+toc-own-page: false
+---
+
+# How it works
+
+## `stars.csv` format
+
+### Columns
+
+| Name      | Content                                                              |
+| --------- | -------------------------------------------------------------------- |
+| id        | Unique ID number (not to be confused with the EDSM id or id64)       |
+| star_type | Type of star                                                         |
+| system    | Name of the system                                                   |
+| body      | Name of the star                                                     |
+| mult      | Jump range multiplier (1.5 for White Dwarfs, 4.0 for Neutron Stars)  |
+| distance  | Distance from arrival in Ls                                          |
+| x,y,z     | Position in galactic coordinates with Sol at (0,0,0)                 |
+
+## Routing Algorithms
+
+### Breadth-First Search (BFS)
+
+### A* Search
+
+### Greedy Search
+
+## Optimizations
+
+## Routing Graphs
+
+# Usage
+
+## Preprocessing Data
+
+## Plotting a Route
+
+# [Changelog](https://gitlab.com/Earthnuker/ed_lrr/blob/pyqt_gui/CHANGELOG.md)
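Since the stars.csv layout documented above is the central data interface of the router, here is an illustrative sketch (not part of the commit) of reading it with Python's csv module. The file path, the presence of a header row with these column names, and the 50 Ly base jump range are assumptions made only for the example.

    # Hypothetical consumer of stars.csv, assuming a header row with the
    # documented column names (id, star_type, system, body, mult, distance, x, y, z).
    import csv

    BASE_RANGE = 50.0  # assumed unboosted jump range in Ly, for illustration only

    with open("stars.csv", newline="") as f:
        for row in csv.DictReader(f):
            # mult scales the jump range (1.5 at a White Dwarf, 4.0 at a Neutron Star).
            boosted_range = BASE_RANGE * float(row["mult"])
            position = (float(row["x"]), float(row["y"]), float(row["z"]))
            print(row["system"], row["body"], row["star_type"], boosted_range, position)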
							
								
								
									
doc/src/img_out.py (new file): 64 additions
@@ -0,0 +1,64 @@
+import sys
+import pylab as PL
+import numpy as np
+from scipy.spatial.ckdtree import cKDTree
+import heapq
+
+exit()  # NOTE: exits immediately; the plotting code below is currently not executed
+
+
+def vec(a, b):
+    return b - a
+
+
+def bfs(points):
+    return
+
+
+def in_ellipse(p, f1, f2, r, offset=0):
+    df = ((f1 - f2) ** 2).sum(0) ** 0.5
+    d_f1 = ((p - f1) ** 2).sum(1) ** 0.5
+    d_f2 = ((p - f2) ** 2).sum(1) ** 0.5
+    return (d_f1 + d_f2) < (df * (1 + r))
+
+
+num_points = 100000
+
+p_orig = np.random.normal(0, 10, size=(num_points, 2))
+tree = cKDTree(p_orig)
+f1 = np.array([0, -30])
+f2 = -f1  # np.random.normal(0, 20, (3,))
+# r = 2 ** ((n / cnt) - cnt)
+
+mask = in_ellipse(p_orig, f1, f2, 0.1)
+
+p = p_orig[mask]
+p_orig = p_orig[~mask]
+
+colors = np.random.random(p.shape[0])
+fig = PL.gcf()
+PL.scatter(
+    p_orig[:, 0],
+    p_orig[:, 1],
+    marker=".",
+    s=0.2,
+    edgecolor="None",
+    c=[(0.0, 0.0, 0.0)],
+    alpha=0.75,
+    rasterized=True,
+)
+PL.scatter(
+    p[:, 0], p[:, 1], marker="s", s=0.2, edgecolor="None", c=colors, rasterized=True
+)
+PL.plot(f1[0], f1[1], "r.", label="Source")
+PL.plot(f2[0], f2[1], "g.", label="Destination")
+
+max_v = max(p_orig[:, 0].max(), p_orig[:, 1].max(), f1[0], f1[1], f2[0], f2[1]) + 2
+min_v = min(p_orig[:, 0].min(), p_orig[:, 1].min(), f1[0], f1[1], f2[0], f2[1]) - 2
+
+
+PL.xlim(min_v, max_v)
+PL.ylim(min_v, max_v)
+
+PL.legend()
+PL.savefig(sys.argv[1], dpi=1200)