pokemon/tools/parse/map_header.py

from dataclasses import dataclass

# Example input: data/maps/headers/AgathasRoom.asm
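# Each header holds one `map_header` line and zero or more `connection`
# lines, e.g. (the connection values here are illustrative, only the shape
# matters for this parser):
#   map_header PalletTown, PALLET_TOWN, OVERWORLD, NORTH | SOUTH
#   connection north, Route1, ROUTE_1, 0   ; name, map_name1, map_name2, offset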


def tokenize_params(params):
    # A parameter like "NORTH | SOUTH" becomes a list of its parts; any other
    # parameter is yielded as a single stripped string.
    for param in params:
        if '|' in param:
            yield [p.strip() for p in param.split('|')]
        else:
            yield param.strip()


def tokenize_line(line):
    # Drop any trailing ';' comment, then split into the macro name (key) and
    # its comma-separated parameters.
    line = line.split(';')[0].strip()
    key_params = line.split(' ', maxsplit=1)
    if len(key_params) == 1:
        return tuple(key_params)
    else:
        key, params = key_params
        params = [p.strip() for p in params.split(',')]
        return key, list(tokenize_params(params))
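# For example (illustrative):
#   tokenize_line('map_header PalletTown, PALLET_TOWN, OVERWORLD, NORTH | SOUTH')
#   -> ('map_header', ['PalletTown', 'PALLET_TOWN', 'OVERWORLD', ['NORTH', 'SOUTH']])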


def tokenize_lines(lines):
    # Skip empty lines; everything else is tokenized.
    for line in filter(bool, lines):
        yield tokenize_line(line)


@dataclass
class MapHeader:
    name1: str
    name2: str
    tileset: str
    connection_names: list[str]  # not sure if this one is useful
    connections: list['Connection']

    def blocks(self):
        return f"{self.name1}_Blocks"

    def text_pointers(self):
        return f"{self.name1}_TextPointers"

    def script(self):
        return f"{self.name1}_Script"

    def object(self):
        return f"{self.name1}_Object"

    def width(self):
        return f"{self.name2}_WIDTH"

    def height(self):
        return f"{self.name2}_HEIGHT"


@dataclass
class Connection:
    name: str
    map_name1: str
    map_name2: str
    offset: int


def parse_connection():
    # unused placeholder; connections are built directly in flatten() below
    return


def flatten(tokens):
    # expects tokens from a single file
    #   PalletTown, PALLET_TOWN, OVERWORLD, NORTH | SOUTH
    # dw \1_Blocks
    # dw \1_TextPointers
    # dw \1_Script
    # dw {\1_Object}
    # \2_WIDTH
    # \2_HEIGHT
    map_headers = [s for s in tokens if s[0] == 'map_header']
    assert len(map_headers) == 1
    map_header, = map_headers
    _, (name1, name2, tileset, connection_mask0) = map_header

    connections = [s[1] for s in tokens if s[0] == 'connection']

    # The connection mask is either a single token or a '|'-joined list; a
    # literal zero means the map has no connections.
    connection_mask = connection_mask0 if isinstance(connection_mask0, list) else [connection_mask0]
    null_mask = lambda m: (
        len(m) == 1 and any(m[0] == n for n in {'0', '$0', '$00'})
    )
    connection_names = (
        [] if null_mask(connection_mask) else connection_mask
    )

    return MapHeader(
        name1=name1,
        name2=name2,
        tileset=tileset,
        connection_names=sorted(connection_names),
        connections=sorted(
            (
                Connection(
                    name,
                    map_name1,
                    map_name2,
                    int(offset),
                )
                for name, map_name1, map_name2, offset in connections
            ),
            key=lambda c: c.name,
        ),
    )
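# Illustrative result for the PalletTown example in the comment above:
#   MapHeader(name1='PalletTown', name2='PALLET_TOWN', tileset='OVERWORLD',
#             connection_names=['NORTH', 'SOUTH'],
#             connections=[...])   # one Connection per `connection` line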


def parse(path):
    with open(path) as f:
        tokens = list(tokenize_lines(f.read().split('\n')))
    return flatten(tokens)


def parse_all(prefix):
    # `prefix` is a pathlib.Path pointing at the root of the source tree that
    # contains data/maps/headers.
    base_path = prefix / 'data/maps/headers'
    paths = [p for p in base_path.iterdir() if p.is_file()]
    return [parse(path) for path in paths]
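

# Minimal usage sketch (illustrative; assumes the current directory is the
# root of the source tree containing data/maps/headers):
if __name__ == '__main__':
    from pathlib import Path

    for header in parse_all(Path('.')):
        print(header.name1, header.tileset,
              [c.name for c in header.connections])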