from parse.maps_blocks import tokenize_block
from parse.map_header import tokenize_line
from parse import number


def tokenize_lines(lines):
    """Yield token tuples for the label and `coll_tiles` lines of the file."""
    for line in lines:
        if '_Coll:' in line:
            # Collision table label line.
            yield tokenize_block(line, delim='::')
        elif 'coll_tiles' in line:
            # `coll_tiles` macro line: take its single block, then tokenize it.
            tokens, = tokenize_block(line, delim='::')
            yield tokenize_line(tokens)


def flatten(tokens):
    """Pair each pending label with the tile IDs of the next `coll_tiles` entry."""
    stack = []
    for t in tokens:
        if t[0] == 'coll_tiles':
            # A `coll_tiles` entry; with no arguments it contributes an empty list.
            tile_ids = t[1] if len(t) == 2 else []
            # Every label seen since the previous entry shares this list.
            for name in stack:
                yield name, list(map(number.parse, tile_ids))
            stack = []
        elif len(t) == 1:
            # A bare label token: remember it until its tile IDs appear.
            name, = t
            stack.append(name)


def parse(prefix):
    """Return a dict mapping each collision label to its list of tile IDs."""
    path = prefix / 'data/tilesets/collision_tile_ids.asm'
    with open(path) as f:
        tokens = tokenize_lines(f.read().split('\n'))
        l = list(flatten(tokens))
        d = dict(l)
        # Each label must map to exactly one tile ID list.
        assert len(l) == len(d)
        return d
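

# Usage sketch (an assumption, not part of the original module): `prefix` is
# expected to be a pathlib.Path pointing at the root of the source tree that
# contains data/tilesets/collision_tile_ids.asm.
if __name__ == '__main__':
    from pathlib import Path

    collision = parse(Path('.'))  # hypothetical prefix; point it at the real checkout
    for label, tile_ids in collision.items():
        print(label, tile_ids)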