def tokenize_block(line, delim):
    # Split a label line on the delimiter into a name plus optional arguments.
    name_args = line.split(delim)
    if len(name_args) == 1:
        # No delimiter: the whole line is a name; drop any trailing ';' comment.
        name, = name_args
        return (name.split(';')[0].strip(),)
    else:
        name, args = name_args
        if args.strip():
            # A directive follows the label: discard the directive keyword
            # and keep its quoted file path, unquoted.
            _, path = args.strip().split(' ')
            return name, path.strip('"')
        else:
            # Bare label: the delimiter is present but nothing follows it.
            return (name,)


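# A sketch of the two token shapes this produces, on hypothetical
# pret-style input lines (the labels and paths here are illustrative,
# not taken from any real maps.asm):
#
#   tokenize_block('Route1_Blocks: INCBIN "maps/Route1.blk"', delim=':')
#   -> ('Route1_Blocks', 'maps/Route1.blk')
#   tokenize_block('Route2_Blocks:', delim=':')
#   -> ('Route2_Blocks',)

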
def tokenize_lines(lines):
    for line in lines:
        if '_Blocks:' in line:
            yield tokenize_block(line, delim=':')


def flatten(tokens, endings, base_path):
    # Path-less labels are aliases for the next labelled path: stack them,
    # then emit every stacked name with that path once a (name, path)
    # token arrives.
    stack = []
    for name_path in tokens:
        if len(name_path) == 2:
            name, path = name_path
            stack.append(name)
            for s_name in stack:
                # Sanity-check every emitted name and path.
                assert any(s_name.endswith(e) for e in endings), (s_name, endings)
                assert path.startswith(base_path), path
                yield s_name, path
            stack = []
        elif len(name_path) == 1:
            stack.append(name_path[0])
        else:
            assert False, name_path


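# Illustrative behaviour (these labels are hypothetical): bare labels
# stacked ahead of a labelled path all resolve to that same path --
#
#   flatten([('A_Blocks',), ('B_Blocks', 'maps/B.blk')],
#           endings=['_Blocks'], base_path='maps/')
#   -> ('A_Blocks', 'maps/B.blk'), ('B_Blocks', 'maps/B.blk')

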
def parse(prefix):
    # Map every *_Blocks label in maps.asm to the block file it includes.
    with open(prefix / 'maps.asm') as f:
        tokens = tokenize_lines(f.read().split('\n'))
        pairs = list(flatten(tokens,
                             endings=['_Blocks'],
                             base_path='maps/'))
        d = dict(pairs)
        # Duplicate labels would be silently dropped by dict(); forbid them.
        assert len(d) == len(pairs)
        return d


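if __name__ == '__main__':
    # Minimal usage sketch, assuming argv[1] points at a directory that
    # contains maps.asm (e.g. the root of a pret-style disassembly
    # checkout); this driver is an illustrative addition.
    import sys
    from pathlib import Path

    for name, path in sorted(parse(Path(sys.argv[1])).items()):
        print(name, path)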