from functools import partial

from parse.generic import tokenize


def flatten(tokens):
    # Reduce the token stream to one entry per constant line:
    # yield the name for 'const' tokens, None for 'const_skip'
    # tokens, and drop 'const_def' tokens entirely.
    for t in tokens:
        if t[0] == 'const':
            _, (name,) = t
            yield name
        elif t[0] == 'const_skip':
            yield None
        elif t[0] == 'const_def':
            continue
        else:
            assert False, t


tokenize_lines = partial(tokenize.lines, prefix='const')


def parse(prefix, path):
    # Read the file at prefix / path and flatten its tokenized lines.
    path = prefix / path
    with open(path) as f:
        return list(flatten(tokenize_lines(f.read().split('\n'))))
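

# A minimal usage sketch, not part of the module's API. The 'data'
# directory and 'constants.txt' file names are hypothetical; this
# assumes parse.generic.tokenize is importable and emits tokens of the
# shapes handled by flatten above.
if __name__ == '__main__':
    from pathlib import Path

    for name in parse(Path('data'), Path('constants.txt')):
        print(name)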