This also improves the map_object parsing code. There are still unhandled issues in the parser output caused by quirks in the input data.
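For context, the tokenizer below keys off three kinds of lines in a map object file: the label, the "border block" comment, and the *_event macros. A minimal sketch of that input shape, assuming the pokered-style format; the label, coordinates, and constant names are illustrative, not taken from the real data:

# Illustrative input lines in the pokered map-object style; every value
# and name below is made up for the example.
SAMPLE_LINES = [
    "PalletTown_Object:",
    "\tdb $b ; border block",
    "\twarp_event  5,  5, OAKS_LAB, 2",
    "\tbg_event 13, 13, 4",
    "\tobject_event  8,  5, SPRITE_OAK, STAY, NONE, 1",
]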
from dataclasses import dataclass

from parse import number
from parse.map_header import tokenize_line


@dataclass
class ObjectEvent:
    type: str
    position: tuple[int, int]
    sprite_id: str
    movement: str
    range_or_direction: str
    text_id: str
    # The trailing arguments are overloaded: an item id, a trainer id and
    # number, or a Pokémon id and level, depending on the object type.
    items_id_or_trainer_id_or_pokemon_id: str | None = None
    trainer_number_or_pokemon_level: str | None = None


@dataclass
class WarpEvent:
    position: tuple[int, int]
    destination_map: str
    destination_warp_index: int


@dataclass
class BgEvent:
    position: tuple[int, int]
    sign_id: str


@dataclass
class Object:
    border_block: int
    warp_events: list
    object_events: list
    bg_events: list


def tokenize_label(line):
    return ('label', line.split(':')[0].strip())


def tokenize_event(line):
    return list(tokenize_line(line))


def tokenize_border_block(line):
    return ('border_block', number.parse(line.strip().split()[1]))


def tokenize_lines(lines):
    # Only event macros, labels, and the border block line are meaningful;
    # everything else is skipped.
    for line in lines:
        if "_event " in line:
            yield tokenize_event(line)
        elif ':' in line:
            yield tokenize_label(line)
        elif 'border block' in line:
            # special case where we are parsing a comment
            yield tokenize_border_block(line)


def object_id(object_args):
    # The object type is inferred from how many arguments follow the
    # coordinates.
    types = {
        6: "trainer",
        5: "item",
        4: "generic",
    }
    assert len(object_args) in types, object_args
    return types[len(object_args)]


def flatten(tokens):
    # Fold the token stream for a single file into its label and an Object.
    label = None
    border_block = None
    warp_events = []
    object_events = []
    bg_events = []
    for token_name, args in tokens:
        # The first two arguments of every event are its map coordinates.
        position = lambda: tuple(map(number.parse, args[:2]))
        if token_name == 'label':
            assert label is None
            label = args
        elif token_name == 'object_event':
            object_args = args[2:]
            event = ObjectEvent(
                object_id(object_args),
                position(),
                *object_args
            )
            object_events.append(event)
        elif token_name == 'warp_event':
            destination_map, destination_warp_index = args[2:]
            event = WarpEvent(
                position(),
                destination_map,
                number.parse(destination_warp_index)
            )
            warp_events.append(event)
        elif token_name == 'bg_event':
            event = BgEvent(
                position(),
                *(args[2:])
            )
            bg_events.append(event)
        elif token_name == 'border_block':
            assert border_block is None
            border_block = args
        else:
            assert False, (token_name, args)

    assert label is not None
    assert border_block is not None
    return label, Object(
        border_block=border_block,
        warp_events=warp_events,
        object_events=object_events,
        bg_events=bg_events,
    )


def parse(path):
    with open(path) as f:
        tokens = tokenize_lines(f.read().split('\n'))
    return flatten(tokens)


def parse_all(prefix):
    # Parse every map object file under the repository and return a mapping
    # from label to Object.
    base_path = prefix / 'data/maps/objects'
    paths = (p for p in base_path.iterdir() if p.is_file())
    return dict(parse(path) for path in paths)
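A quick usage sketch, assuming the prefix argument is a pathlib.Path pointing at a checkout that contains data/maps/objects; the directory name and map label below are placeholders, not taken from the source:

from pathlib import Path

# Hypothetical invocation; "pokered" and "PalletTown_Object" are placeholders.
objects = parse_all(Path("pokered"))
pallet = objects["PalletTown_Object"]
print(pallet.border_block, len(pallet.warp_events), len(pallet.object_events))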