
from parse.generic import tokenize
from parse.generic.flatten import flatten


def tokenize_lines(lines):
    # Yield one token block for each line containing the '::' delimiter.
    for line in lines:
        if '::' in line:
            yield tokenize.block(line, delim='::')


def parse(prefix):
    # `prefix` is expected to be a pathlib.Path pointing at the repository root.
    path = prefix / 'gfx/sprites.asm'
    with open(path) as f:
        tokens = tokenize_lines(f.read().split('\n'))
        # Flatten the token stream into (name, value) pairs.
        pairs = list(flatten(tokens, endings=['Sprite'], base_path='gfx/'))
        mapping = dict(pairs)
        # Building the dict must not collapse any duplicate names.
        assert len(pairs) == len(mapping)
        return mapping
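

# A minimal usage sketch, assuming `prefix` is a pathlib.Path pointing at a
# checkout that contains gfx/sprites.asm; the repository name below is
# hypothetical.
if __name__ == '__main__':
    from pathlib import Path

    sprites = parse(Path('pokecrystal'))
    print(f'parsed {len(sprites)} sprite entries')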