from parse.generic import tokenize
from parse.generic.flatten import flatten


def tokenize_lines(lines):
    # Yield a tokenized entry for every line that declares a `_Blocks:` label.
    for line in lines:
        if '_Blocks:' in line:
            yield tokenize.block(line, delim=':')


def parse(prefix):
    # Collect the `_Blocks` entries from maps.asm into a dict; the assert
    # checks that no entries were lost to duplicate keys.
    with open(prefix / 'maps.asm') as f:
        tokens = tokenize_lines(f.read().split('\n'))
        l = list(flatten(tokens,
                         endings=['_Blocks'],
                         base_path='maps/'))
        d = dict(l)
        assert len(d) == len(l)  # no duplicate labels
        return d
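

# Usage sketch (not part of the original module): `parse` expects a path-like
# object supporting `/`, e.g. a pathlib.Path, pointing at a directory that
# contains maps.asm. The directory name below is a hypothetical example.
#
#     from pathlib import Path
#     blocks = parse(Path('disassembly'))
#     # blocks maps each *_Blocks label found in maps.asm to its flattened data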