# pokemon/tools/parse/generic/tokenize.py
def params(params):
    """Tokenize a sequence of raw parameter strings.

    Yields each parameter stripped of surrounding whitespace.  A parameter
    containing '|' is yielded as a list of its stripped '|'-separated
    alternatives instead of a plain string.
    """
    for param in params:
        if '|' in param:
            # Split on the bare '|': the guard above only checks for '|',
            # but the original split on ' |' (space-pipe), so an unspaced
            # separator like 'a|b' matched the guard yet was never split.
            yield [p.strip() for p in param.split('|')]
        else:
            yield param.strip()
# Alias so callers (e.g. tokenize_line) can reach this even when a local
# variable named `params` shadows the function name.
tokenize_params = params
def line(line):
    """Tokenize one source line into ``(key,)`` or ``(key, params)``.

    Everything after the first ';' is discarded as a comment.  If anything
    follows the key, it is split on ',' and run through tokenize_params.
    """
    # Drop the trailing comment, then surrounding whitespace.
    code, _, _ = line.partition(';')
    code = code.strip()
    # The key is everything up to the first space; the rest (if any) is
    # the comma-separated parameter list.
    head, _, tail = code.partition(' ')
    if not tail:
        return (head,)
    pieces = [piece.strip() for piece in tail.split(',')]
    return head, list(tokenize_params(pieces))
# Alias so callers can reach this even when a local variable named `line`
# shadows the function name.
tokenize_line = line
def lines(lines, prefix=""):
    """Tokenize an iterable of raw lines, honoring '\\' continuations.

    Each line is stripped; only lines starting with *prefix* are tokenized
    (the default empty prefix matches every line).  A line ending in a
    backslash is accumulated and concatenated with the following matching
    line before being passed to tokenize_line.  Blank lines are skipped.
    """
    accumulator = ""
    for line in filter(bool, lines):
        line = line.strip()
        # Skip whitespace-only lines: previously they slipped past
        # filter(bool), stripped down to '', matched the default empty
        # prefix, and were tokenized into a spurious ('',) entry.
        if not line:
            continue
        if line.startswith(prefix):
            if line.endswith('\\'):
                # NOTE(review): continuation pieces are concatenated with
                # no separator inserted — confirm that is intended.
                accumulator += line[:-1].strip()
            else:
                yield tokenize_line(accumulator + line)
                accumulator = ""
def block(line, delim):
    """Parse a block-opening line into ``(name,)`` or ``(name, path)``.

    *line* is split on the first *delim*; anything after a ';' is discarded
    as a comment (previously only the no-delimiter branch did this).  When
    the text after the delimiter holds a directive followed by a quoted
    path (e.g. 'INCLUDE "file.asm"'), returns (name, path) with the quotes
    stripped; otherwise returns just (name,).
    """
    # maxsplit=1: a second delim occurrence later in the line used to
    # produce three pieces and break the 2-tuple unpack with ValueError.
    name_args = line.split(delim, 1)
    if len(name_args) == 1:
        name, = name_args
        return (name.split(';')[0].strip(),)
    name, args = name_args
    # Strip the name here too, for consistency with the branch above.
    name = name.strip()
    # Drop trailing comments from the args, mirroring the no-delim branch;
    # previously 'Main: ; comment' mis-parsed the comment text as a path.
    args = args.split(';')[0].strip()
    if not args:
        return (name,)
    # split(maxsplit=1) tolerates runs of spaces and quoted paths that
    # contain spaces; a bare split(' ') raised ValueError on either.
    _, path = args.split(maxsplit=1)
    return name, path.strip('"')