parse: move tokenize_line/tokenize_lines to a new module
At times I forget what I have already written. I hope that this might help keep things more organized.
parent 60c346406c
commit a921a44b66
@@ -1,6 +1,7 @@
+from functools import partial
 from parse.maps_blocks import tokenize_block
-from parse.map_header import tokenize_line
 from parse import number
+from parse.generic import macro_line

 def tokenize_lines(lines):
     for line in lines:
@@ -8,7 +9,7 @@ def tokenize_lines(lines):
             yield tokenize_block(line, delim='::')
         elif 'coll_tiles' in line:
             tokens, = tokenize_block(line, delim='::')
-            yield tokenize_line(tokens)
+            yield macro_line.tokenize_line(tokens)

 def flatten(tokens):
     stack = []

tools/parse/generic/macro_line.py (new file, 22 lines)
@@ -0,0 +1,22 @@
+def tokenize_params(params):
+    for param in params:
+        if '|' in param:
+            yield [p.strip() for p in param.split(' |')]
+        else:
+            yield param.strip()
+
+def tokenize_line(line):
+    line = line.split(';')[0].strip()
+    key_params = line.split(' ', maxsplit=1)
+    if len(key_params) == 1:
+        return tuple(key_params)
+    else:
+        key, params = key_params
+        params = [p.strip() for p in params.split(',')]
+        return key, list(tokenize_params(params))
+
+def tokenize_lines(lines, prefix=""):
+    for line in filter(bool, lines):
+        line = line.strip()
+        if line.startswith(prefix):
+            yield tokenize_line(line)

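For orientation, here is what the new helpers return for a few representative macro lines. The sample inputs are invented for illustration; the `split(' |')` branch is what keeps `A | B` flag unions together as a nested list:

    >>> from parse.generic import macro_line
    >>> macro_line.tokenize_line("warp_event 3, 4, LAST_MAP, 1 ; comment")
    ('warp_event', ['3', '4', 'LAST_MAP', '1'])
    >>> macro_line.tokenize_line("db BIT_A | BIT_B, 7")
    ('db', [['BIT_A', 'BIT_B'], '7'])
    >>> macro_line.tokenize_line("ret")
    ('ret',)

`tokenize_lines` defaults to `prefix=""`, so with no prefix it tokenizes every non-empty line; call sites that previously filtered with an `if '...' in line` check now pass a prefix instead.
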
@@ -1,7 +1,7 @@
 from dataclasses import dataclass

 from parse import number
-from parse import map_header
+from parse.generic import macro_line

 def tokenize_label(line):
     return line.split(':')[0].strip()
@@ -11,9 +11,9 @@ def tokenize_lines(lines):
         if line.strip().endswith(':'):
             yield (tokenize_label(line),)
         elif ('dw ' in line or 'db ' in line) and ' \\' not in line:
-            yield map_header.tokenize_line(line)
+            yield macro_line.tokenize_line(line)
         elif 'hidden_object' in line or 'hidden_text_predef' in line:
-            yield map_header.tokenize_line(line)
+            yield macro_line.tokenize_line(line)

 def flatten0(tokens):
     stack = []

@@ -1,14 +1,9 @@
 from dataclasses import dataclass
+from functools import partial

-from parse.map_header import tokenize_line
+from parse.generic import macro_line

-def tokenize_map_const(line):
-    return tokenize_line(line)
+tokenize_lines = partial(macro_line.tokenize_lines, prefix="map_const")

-def tokenize_lines(lines):
-    for line in lines:
-        if "map_const" in line:
-            yield tokenize_map_const(line)
-
 @dataclass
 class MapConstant:
@@ -17,12 +12,12 @@ class MapConstant:

 def flatten(tokens):
     for macro, args in tokens:
-        if macro == 'map_const':
+        assert macro == 'map_const', (macro, args)
         name, width, height = args
         yield name, MapConstant(
             int(width),
             int(height)
         )

 def parse(prefix):
     path = prefix / "constants/map_constants.asm"

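The `partial` is just currying: it pins the new `prefix` keyword so this module keeps exporting a one-argument `tokenize_lines`. A minimal equivalence sketch, with a made-up input line:

    from functools import partial
    from parse.generic import macro_line

    tokenize_lines = partial(macro_line.tokenize_lines, prefix="map_const")

    lines = ["map_const PALLET_TOWN, 10, 9 ; illustrative, not the real file"]
    assert list(tokenize_lines(lines)) \
        == list(macro_line.tokenize_lines(lines, prefix="map_const")) \
        == [('map_const', ['PALLET_TOWN', '10', '9'])]

`flatten` would then turn that token into `('PALLET_TOWN', MapConstant(10, 9))`.
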
@@ -1,28 +1,9 @@
 from dataclasses import dataclass

+from parse.generic import macro_line
+
 # data/maps/headers/AgathasRoom.asm

-def tokenize_params(params):
-    for param in params:
-        if '|' in param:
-            yield [p.strip() for p in param.split(' |')]
-        else:
-            yield param.strip()
-
-def tokenize_line(line):
-    line = line.split(';')[0].strip()
-    key_params = line.split(' ', maxsplit=1)
-    if len(key_params) == 1:
-        return tuple(key_params)
-    else:
-        key, params = key_params
-        params = [p.strip() for p in params.split(',')]
-        return key, list(tokenize_params(params))
-
-def tokenize_lines(lines):
-    for line in filter(bool, lines):
-        yield tokenize_line(line)
-
 @dataclass
 class MapHeader:
     name1: str
@@ -101,6 +82,8 @@ def flatten(tokens):
             key=lambda c: c.name))
     )

+tokenize_lines = macro_line.tokenize_lines
+
 def parse(path):
     with open(path) as f:
         tokens = list(tokenize_lines(f.read().split('\n')))

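Since the old `map_header` tokenizer moved wholesale into `parse.generic.macro_line`, the alias above keeps downstream code working: `parse()` and anything else that resolves `map_header.tokenize_lines` gets the very same function object. A quick sanity check, assuming the package layout shown in this commit:

    from parse import map_header
    from parse.generic import macro_line

    # A plain alias, not a wrapper: identity, not just equal behavior.
    assert map_header.tokenize_lines is macro_line.tokenize_lines
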
@@ -1,7 +1,7 @@
 from dataclasses import dataclass

 from parse import number
-from parse.map_header import tokenize_line
+from parse.generic import macro_line

 @dataclass
 class ObjectEvent:
@@ -36,7 +36,7 @@ def tokenize_label(line):
     return ('label', line.split(':')[0].strip())

 def tokenize_event(line):
-    return list(tokenize_line(line))
+    return list(macro_line.tokenize_line(line))

 def tokenize_border_block(line):
     return ('border_block', number.parse(line.strip().split()[1]))

@@ -1,12 +1,9 @@
 from dataclasses import dataclass
+from functools import partial

-from parse.map_header import tokenize_line
 from parse import number
+from parse.generic import macro_line

-def tokenize_lines(lines):
-    for line in lines:
-        if 'overworld_sprite ' in line:
-            yield tokenize_line(line)
+tokenize_lines = partial(macro_line.tokenize_lines, prefix='overworld_sprite')

 @dataclass
 class Spritesheet:

@ -1,17 +1,14 @@
|
|||||||
from parse.map_header import tokenize_line
|
from functools import partial
|
||||||
|
from parse.generic import macro_line
|
||||||
def tokenize_lines(lines):
|
|
||||||
for line in lines:
|
|
||||||
if 'const' in line:
|
|
||||||
yield tokenize_line(line)
|
|
||||||
|
|
||||||
|
tokenize_lines = partial(macro_line.tokenize_lines, prefix='const ')
|
||||||
|
|
||||||
def flatten(tokens):
|
def flatten(tokens):
|
||||||
index = 0
|
index = 0
|
||||||
for t in tokens:
|
for t in tokens:
|
||||||
if t[0] == 'const':
|
assert t[0] == 'const', t
|
||||||
yield t[1][0], index
|
yield t[1][0], index
|
||||||
index += 1
|
index += 1
|
||||||
|
|
||||||
def parse(prefix):
|
def parse(prefix):
|
||||||
path = prefix / 'constants/tileset_constants.asm'
|
path = prefix / 'constants/tileset_constants.asm'
|
||||||
|
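End to end, the tileset-constant path now reads: prefix-filter and tokenize through the generic helper, then let `flatten` enumerate the names. A sketch with invented lines standing in for constants/tileset_constants.asm (note the behavior change: a stray non-`const` line used to be silently skipped by the `if`, while the `assert` now fails loudly):

    from functools import partial
    from parse.generic import macro_line

    tokenize_lines = partial(macro_line.tokenize_lines, prefix='const ')

    lines = [
        "\tconst OVERWORLD    ; $00",
        "\tconst REDS_HOUSE_1 ; $01",
    ]
    # tokenize_lines strips each line first, so the leading tabs are fine.
    tokens = list(tokenize_lines(lines))
    # [('const', ['OVERWORLD']), ('const', ['REDS_HOUSE_1'])]
    # flatten(tokens) then yields ('OVERWORLD', 0), ('REDS_HOUSE_1', 1).
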
@@ -1,12 +1,8 @@
 from dataclasses import dataclass
+from functools import partial

-from parse.map_header import tokenize_line
 from parse import number
+from parse.generic import macro_line

-def tokenize_lines(lines):
-    for line in lines:
-        if 'tileset ' in line:
-            yield tokenize_line(line)
-
 @dataclass
 class TilesetHeader:
@@ -25,10 +21,11 @@ class TilesetHeader:
     def coll(self):
         return f"{self.name}_Coll"

+tokenize_lines = partial(macro_line.tokenize_lines, prefix="tileset")
+
 def flatten(tokens):
     for ts in tokens:
-        if ts[0] != 'tileset':
-            continue
+        assert ts[0] == 'tileset'
         _, (name, c0, c1, c2, grass_tile, animations) = ts
         yield TilesetHeader(
             name=name,