From 012d9089f758d653334f28ddd2ed68c572f04210 Mon Sep 17 00:00:00 2001
From: JonoCode9374 <36217120+JonoCode9374@users.noreply.github.com>
Date: Sat, 28 Dec 2019 11:14:53 +1100
Subject: [PATCH] Add files via upload

---
 ekg/Parse.py    |  75 +++++++++
 ekg/TokenLib.py |  30 ++++
 ekg/chainify.py | 205 ++++++++++++++++++++++++
 3 files changed, 310 insertions(+)
 create mode 100644 ekg/Parse.py
 create mode 100644 ekg/TokenLib.py
 create mode 100644 ekg/chainify.py

diff --git a/ekg/Parse.py b/ekg/Parse.py
new file mode 100644
index 0000000..ffc857c
--- /dev/null
+++ b/ekg/Parse.py
@@ -0,0 +1,75 @@
+# A Simple Parser For Ekg
+
+import TokenLib
+from TokenLib import Token
+
+
+class Parser():
+    def __init__(self, source: str):
+        self.source = source
+
+    def parse(self, optional_source: str = "") -> [TokenLib.Token]:
+
+        # Assumes that the source is already balanced
+
+        # Variables used for determining if a block is being parsed /
+        # if an escape sequence is present
+
+        escaped = False
+        block_level = 0
+        temp_block = ""
+        token_list = []
+
+        if optional_source:
+            code = optional_source
+        else:
+            code = self.source
+
+        for char in code:
+            if escaped:
+                token_list.append(Token(TokenLib.ESCAPE, char))
+                escaped = False
+
+            elif char == "\\":
+                escaped = True
+
+            elif char == "[":
+                if block_level >= 1:
+                    block_level += 1
+                    temp_block += char
+
+                else:
+                    block_level = 1
+
+            elif block_level:
+                if char == "]":
+                    if block_level == 1:
+                        token_list.append(Token(TokenLib.BLOCK,
+                                                self.parse(temp_block)))
+                        block_level = 0
+                        temp_block = ""
+
+                    else:
+                        temp_block += char
+                        block_level -= 1
+
+                else:
+                    temp_block += char
+
+            else:
+                token_list.append(Token(TokenLib.INSTRUCTION,
+                                        char))
+
+        if temp_block:
+            token_list.append(Token(TokenLib.BLOCK,
+                                    temp_block))
+
+        return token_list
+
+if __name__ == "__main__":
+    source: str = "[P[DD+s]]S3"
+    parser: Parser = Parser(source)
+    tokens: [Token] = parser.parse()
+
+    for token in tokens:
+        print(token.get_data())
diff --git a/ekg/TokenLib.py b/ekg/TokenLib.py
new file mode 100644
index 0000000..92b66ad
--- /dev/null
+++ b/ekg/TokenLib.py
@@ -0,0 +1,30 @@
+# A Simple Token Class For Ekg
+
+
+class Token():
+    def __init__(self, name: str, value: str) -> None:
+        self.tkn_name: str = name
+        self.tkn_value: str = value
+
+    def get_name(self) -> str:
+        return self.tkn_name
+
+    def get_value(self) -> str:
+        return self.tkn_value
+
+    def get_data(self) -> (str, str):
+        return (self.tkn_name, self.tkn_value)
+
+    def set_name(self, name: str):
+        self.tkn_name = name
+
+    def set_value(self, value: str):
+        self.tkn_value = value
+
+# Token names
+
+INSTRUCTION = "instruction"
+BLOCK = "block"
+STRING = "string"
+INTEGER = "integer"
+ESCAPE = "escape"
diff --git a/ekg/chainify.py b/ekg/chainify.py
new file mode 100644
index 0000000..ff0e3d0
--- /dev/null
+++ b/ekg/chainify.py
@@ -0,0 +1,205 @@
+import Parse
+import TokenLib
+import string
+
+dyads: str = string.punctuation
+monads: str = string.ascii_uppercase
+nilads: str = "1234567890"
+
+source: str = input()[::-1]  # read the program and reverse it
+
+def balance(source: str) -> str:
+
+
+    '''
+
+    a[bc   --> a[bc]
+    ab}    --> {ab}
+    m{ab]c --> [m{ab}]c
+
+    '''
+
+    final: str = ""
+    brackets: [str] = []
+    temp: str = ""
+    escaped: bool = False
+
+    for char in source:
+
+        if escaped:
+            final += "\\" + char
+            escaped = False
+
+        elif char == "\\":
+            escaped = True
+
+        elif char in "[{":
+            brackets.append(char)
+            final += char
+
+        elif char == "}":
+            if brackets:
+                if brackets[-1] == "{":
+                    final += char
+
+                else:
+                    final = "{" + final + "]}"
+
+                brackets.pop()
+
+            else:
+                final = "[" + final + "]"
+
+        elif char == "]":
+            if brackets:
+                if brackets[-1] == "[":
+                    final += char
+
+                else:
+                    final = "[" + final + "}]"
+
+                brackets.pop()
+
+            else:
+                final = "[" + final + "]"
+
+        else:
+            final += char
+
+    if brackets:
+        for char in brackets:
+            if char == "[":
+                final += "]"
+            else:
+                final += "}"
+    return final.replace("{", "[").replace("}", "]")
+
+
+def arities(source: [TokenLib.Token]) -> [(int, TokenLib.Token)]:
+
+    arity_list: [(int, TokenLib.Token)] = []
+    for token in source:
+        if token.get_name() == TokenLib.BLOCK:
+            arity_list.append((0,
+                               arities(token.get_value())))
+
+
+'''
+arities = []
+for char in source:
+    if char in dyads:
+        arities.append((2, char))
+
+    elif char in monads:
+        arities.append((1, char))
+
+    else:
+        arities.append((0, char))
+
+exprs = []
+expr = []
+patterns = ["0", "1", "2",
+            "020", "021", "022", "02", "10", "11", "12", "20", "21", "22",
+            "102", "110", "111", "112", "120", "121", "122",
+            "202", "210", "211", "212", "220", "221", "222"]
+pattern = ""
+while len(arities):
+    if pattern in patterns and pattern + str(arities[-1][0]) not in patterns:
+        exprs.append([pattern, expr])
+        expr = []
+        pattern = ""
+
+    pattern += str(arities[-1][0])
+    expr.append(arities[-1][1])
+    arities.pop()
+
+if expr and pattern in patterns:
+    exprs.append([pattern, expr])
+    expr = []
+    pattern = ""
+
+print(exprs)
+
+
+for exp in exprs:
+    pattern, fns = exp
+
+    if pattern == "0":
+        print(fns[0])
+
+    elif pattern == "1":
+        print(fns[0] + "(R)")
+
+    elif pattern == "2":
+        print(fns[0] + "(L, R)")
+
+    elif pattern == "020":
+        print(fns[1] + "(" + fns[0] + ", " + fns[2] + ")")
+
+    elif pattern == "021":
+        print(fns[1] + "(" + fns[0] + ", " + fns[2] + "(R))")
+
+    elif pattern == "022":
+        print(fns[1] + "(" + fns[0] + ", " + fns[2] + "(L, R))")
+
+    elif pattern == "02":
+        print(fns[1] + "(" + fns[0] + ", R)")
+
+    elif pattern == "10":
+        print(fns[0] + "(" + fns[1] + ")")
+
+    elif pattern == "11":
+        print(fns[0] + "(" + fns[1] + "(R))")
+
+    elif pattern == "12":
+        print(fns[1] + "(" + fns[0] + ", " + "R)")
+
+    elif pattern == "20":
+        print(fns[0] + "(L, " + fns[1] + ")")
+
+    elif pattern == "21":
+        print(fns[0] + "(L, " + fns[1] + ")")
+
+    elif pattern == "22":
+        print(fns[1] + "(" + fns[0] + "(L, R), R*)")
+
+    elif pattern == "102":
+        print(fns[2] + "(" + fns[0] + "(" + fns[1] + "), R)")
+
+    elif pattern == "110":
+        print(fns[0] + "(" + fns[1] + "(" + fns[2] + "))")
+
+    elif pattern == "111":
+        print(fns[0] + "(" + fns[1] + "(" + fns[2] + "(R)))")
+
+    elif pattern == "120":
+        print(fns[1] + "(" + fns[0] + "(R), " + fns[2] + ")")
+
+    elif pattern == "121":
+        print(fns[1] + "(" + fns[0] + "(R), " + fns[2] + "(R*))")
+
+    elif pattern == "122":
+        print(fns[1] + "(" + fns[0] + "(R), " + fns[2] + "(L*, R*))")
+
+    elif pattern == "202":
+        print(fns[2] + "(" + fns[0] + "(L, " + fns[1] + "), R*)")
+
+    elif pattern == "210":
+        print(fns[0] + "(L, " + fns[1] + "(" + fns[2] + "))")
+
+    elif pattern == "211":
+        print(fns[0] + "(L, " + fns[1] + "(" + fns[2] + "(R*)))")
+
+    elif pattern == "212":
+        print(fns[2] + "(" + fns[0] + "(L, " + fns[1] + "(R*)), R&)")
+
+    elif pattern == "220":
+        print(fns[1] + "(" + fns[0] + "(L, R), " + fns[2] + ")")
+
+    elif pattern == "221":
+        print(fns[1] + "(" + fns[0] + "(L, R), " + fns[2] + "(R*))")
+
+    elif pattern == "222":
+        print(fns[2] + "(" + fns[1] + "(" + fns[0] + "(L, R), R*), R&)")
+'''
+
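
Review notes (the snippets below are sketches to aid review, not part of the patch):

Parse.py builds nested BLOCK tokens by recursing on the text between matching brackets, so the sample program in its __main__ block, "[P[DD+s]]S3", should come out as one BLOCK token followed by two INSTRUCTION tokens. A small check of that expectation, assuming ekg/Parse.py and ekg/TokenLib.py from this patch are on the import path:

    from Parse import Parser
    from TokenLib import BLOCK, INSTRUCTION

    # "[P[DD+s]]S3" -> [BLOCK [P, BLOCK [D, D, +, s]], S, 3]
    tokens = Parser("[P[DD+s]]S3").parse()
    assert tokens[0].get_name() == BLOCK
    assert [t.get_value() for t in tokens[1:]] == ["S", "3"]

    inner = tokens[0].get_value()               # nested token list for P[DD+s]
    assert inner[0].get_data() == (INSTRUCTION, "P")
    assert inner[1].get_name() == BLOCK         # DD+s, parsed recursively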
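
chainify.balance() closes or opens whatever brackets are missing and then normalises braces to square brackets, as its docstring examples suggest. Traced against the code, those examples come out as below; the import line is an assumption, since chainify.py currently calls input() at module level and balance() is only cleanly importable if that call is moved under an if __name__ == "__main__" guard:

    # Assumption: the module-level input() call has been moved under a __main__ guard.
    from chainify import balance

    assert balance("a[bc") == "a[bc]"
    assert balance("ab}") == "[ab]"         # braces are normalised to brackets
    assert balance("m{ab]c") == "[m[ab]]c"  # i.e. [m{ab}]c after normalisation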
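
arities() currently classifies only BLOCK tokens and never returns arity_list. A minimal sketch of how it might be completed, mirroring the commented-out per-character draft at the bottom of chainify.py; treating dyads as arity 2, monads as arity 1 and everything else as arity 0 is taken from that draft and is an assumption about the intended design, not something the patch states:

    import string
    import TokenLib

    dyads = string.punctuation           # same classification tables as chainify.py
    monads = string.ascii_uppercase

    def arities(source):
        # Sketch only: a token-level version of the commented-out per-character loop.
        # Escape tokens are not treated specially here; digits and anything else
        # fall through to arity 0.
        arity_list = []
        for token in source:
            if token.get_name() == TokenLib.BLOCK:
                # A block behaves as a single unit; its contents are classified recursively.
                arity_list.append((0, arities(token.get_value())))
            elif token.get_value() in dyads:
                arity_list.append((2, token))
            elif token.get_value() in monads:
                arity_list.append((1, token))
            else:
                arity_list.append((0, token))
        return arity_list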