parse: move tokenize_line/tokenize_lines to a new module
At times I forget what I have already written. I hope that this might help keep things more organized.
parent 60c346406c
commit a921a44b66
@@ -1,6 +1,7 @@
 from functools import partial
 from parse.maps_blocks import tokenize_block
 from parse.map_header import tokenize_line
 from parse import number
+from parse.generic import macro_line

 def tokenize_lines(lines):
     for line in lines:
@@ -8,7 +9,7 @@ def tokenize_lines(lines):
             yield tokenize_block(line, delim='::')
         elif 'coll_tiles' in line:
             tokens, = tokenize_block(line, delim='::')
-            yield tokenize_line(tokens)
+            yield macro_line.tokenize_line(tokens)

 def flatten(tokens):
     stack = []
tools/parse/generic/macro_line.py (new file, 22 lines)
@@ -0,0 +1,22 @@
+def tokenize_params(params):
+    for param in params:
+        if '|' in param:
+            yield [p.strip() for p in param.split(' |')]
+        else:
+            yield param.strip()
+
+def tokenize_line(line):
+    line = line.split(';')[0].strip()
+    key_params = line.split(' ', maxsplit=1)
+    if len(key_params) == 1:
+        return tuple(key_params)
+    else:
+        key, params = key_params
+        params = [p.strip() for p in params.split(',')]
+        return key, list(tokenize_params(params))
+
+def tokenize_lines(lines, prefix=""):
+    for line in filter(bool, lines):
+        line = line.strip()
+        if line.startswith(prefix):
+            yield tokenize_line(line)
@@ -1,7 +1,7 @@
 from dataclasses import dataclass

 from parse import number
-from parse import map_header
+from parse.generic import macro_line

 def tokenize_label(line):
     return line.split(':')[0].strip()
@@ -11,9 +11,9 @@ def tokenize_lines(lines):
         if line.strip().endswith(':'):
             yield (tokenize_label(line),)
         elif ('dw ' in line or 'db ' in line) and ' \\' not in line:
-            yield map_header.tokenize_line(line)
+            yield macro_line.tokenize_line(line)
         elif 'hidden_object' in line or 'hidden_text_predef' in line:
-            yield map_header.tokenize_line(line)
+            yield macro_line.tokenize_line(line)

 def flatten0(tokens):
     stack = []
@@ -1,14 +1,9 @@
 from dataclasses import dataclass
 from functools import partial

-from parse.map_header import tokenize_line
+from parse.generic import macro_line

-def tokenize_map_const(line):
-    return tokenize_line(line)
-
-def tokenize_lines(lines):
-    for line in lines:
-        if "map_const" in line:
-            yield tokenize_map_const(line)
+tokenize_lines = partial(macro_line.tokenize_lines, prefix="map_const")

 @dataclass
 class MapConstant:
@@ -17,12 +12,12 @@ class MapConstant:

 def flatten(tokens):
     for macro, args in tokens:
-        if macro == 'map_const':
-            name, width, height = args
-            yield name, MapConstant(
-                int(width),
-                int(height)
-            )
+        assert macro == 'map_const', (macro, args)
+        name, width, height = args
+        yield name, MapConstant(
+            int(width),
+            int(height)
+        )

 def parse(prefix):
     path = prefix / "constants/map_constants.asm"
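The pattern above repeats through the rest of the commit: each file-specific tokenize_lines loop collapses into a functools.partial over the generic tokenizer, which filters by line prefix. A minimal sketch with stand-in input, assuming the module layout from this diff:

    from functools import partial
    from parse.generic import macro_line

    tokenize_lines = partial(macro_line.tokenize_lines, prefix="map_const")

    lines = [
        "",                                # dropped by filter(bool, ...)
        "\tmap_const PALLET_TOWN, 10, 9",  # kept: starts with the prefix once stripped
        "; comment-only line",             # dropped: wrong prefix
    ]
    list(tokenize_lines(lines))
    # -> [('map_const', ['PALLET_TOWN', '10', '9'])]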
@@ -1,28 +1,9 @@
 from dataclasses import dataclass

+from parse.generic import macro_line
+
 # data/maps/headers/AgathasRoom.asm

-def tokenize_params(params):
-    for param in params:
-        if '|' in param:
-            yield [p.strip() for p in param.split(' |')]
-        else:
-            yield param.strip()
-
-def tokenize_line(line):
-    line = line.split(';')[0].strip()
-    key_params = line.split(' ', maxsplit=1)
-    if len(key_params) == 1:
-        return tuple(key_params)
-    else:
-        key, params = key_params
-        params = [p.strip() for p in params.split(',')]
-        return key, list(tokenize_params(params))
-
-def tokenize_lines(lines):
-    for line in filter(bool, lines):
-        yield tokenize_line(line)

 @dataclass
 class MapHeader:
     name1: str
@@ -101,6 +82,8 @@ def flatten(tokens):
                    key=lambda c: c.name))
     )

+tokenize_lines = macro_line.tokenize_lines
+
 def parse(path):
     with open(path) as f:
         tokens = list(tokenize_lines(f.read().split('\n')))
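Note that the alias passes no prefix, so map_header keeps its old tokenize-everything behaviour: macro_line.tokenize_lines defaults to prefix="", and str.startswith("") is true for every string, leaving only empty lines filtered out. A stand-in check (the label is hypothetical):

    list(macro_line.tokenize_lines(["", "dw PalletTown_h"]))
    # -> [('dw', ['PalletTown_h'])]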
@@ -1,7 +1,7 @@
 from dataclasses import dataclass

 from parse import number
-from parse.map_header import tokenize_line
+from parse.generic import macro_line

 @dataclass
 class ObjectEvent:
@@ -36,7 +36,7 @@ def tokenize_label(line):
     return ('label', line.split(':')[0].strip())

 def tokenize_event(line):
-    return list(tokenize_line(line))
+    return list(macro_line.tokenize_line(line))

 def tokenize_border_block(line):
     return ('border_block', number.parse(line.strip().split()[1]))
@@ -1,12 +1,9 @@
 from dataclasses import dataclass
-from parse.map_header import tokenize_line
+from functools import partial
 from parse import number
+from parse.generic import macro_line

-def tokenize_lines(lines):
-    for line in lines:
-        if 'overworld_sprite ' in line:
-            yield tokenize_line(line)
+tokenize_lines = partial(macro_line.tokenize_lines, prefix='overworld_sprite')

 @dataclass
 class Spritesheet:
@@ -1,17 +1,14 @@
-from parse.map_header import tokenize_line
-
-def tokenize_lines(lines):
-    for line in lines:
-        if 'const' in line:
-            yield tokenize_line(line)
+from functools import partial
+from parse.generic import macro_line
+
+tokenize_lines = partial(macro_line.tokenize_lines, prefix='const ')

 def flatten(tokens):
     index = 0
     for t in tokens:
-        if t[0] == 'const':
-            yield t[1][0], index
-            index += 1
+        assert t[0] == 'const', t
+        yield t[1][0], index
+        index += 1

 def parse(prefix):
     path = prefix / 'constants/tileset_constants.asm'
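The flatten change here (and in the tileset headers below) leans on the new prefix filter: since tokenize_lines now only yields lines starting with 'const ', the silent if guard can become an assert that fails loudly on anything unexpected. A stand-in run over hypothetical tokens:

    def flatten(tokens):
        index = 0
        for t in tokens:
            assert t[0] == 'const', t
            yield t[1][0], index
            index += 1

    dict(flatten([('const', ['OVERWORLD']), ('const', ['FOREST'])]))
    # -> {'OVERWORLD': 0, 'FOREST': 1}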
@@ -1,12 +1,8 @@
 from dataclasses import dataclass
 from functools import partial

-from parse.map_header import tokenize_line
 from parse import number
-
-def tokenize_lines(lines):
-    for line in lines:
-        if 'tileset ' in line:
-            yield tokenize_line(line)
+from parse.generic import macro_line

 @dataclass
 class TilesetHeader:
@@ -25,10 +21,11 @@ class TilesetHeader:
     def coll(self):
         return f"{self.name}_Coll"

+tokenize_lines = partial(macro_line.tokenize_lines, prefix="tileset")
+
 def flatten(tokens):
     for ts in tokens:
-        if ts[0] != 'tileset':
-            continue
+        assert ts[0] == 'tileset'
         _, (name, c0, c1, c2, grass_tile, animations) = ts
         yield TilesetHeader(
             name=name,