tools/parse: reorganize parsers

This commit is contained in:
Zack Buhman 2023-08-01 01:23:27 +00:00
parent f4aad91349
commit ac43946728
32 changed files with 254 additions and 149 deletions

View File

@ -11,7 +11,7 @@ def extern_collision_tile_ids():
for name, index in sorted_tilesets_constants_list():
tileset_header = parse.tileset_headers_list()[index]
coll_path = tileset_header.coll()
tile_ids = parse.collision_tile_ids_list()[coll_path]
tile_ids = parse.tileset_collision_tile_ids_list()[coll_path]
yield f"extern uint8_t {coll_path}[{len(tile_ids)}];"
def generate_header():
@ -23,7 +23,7 @@ def generate_header():
def collision_array(name, index):
tileset_header = parse.tileset_headers_list()[index]
coll_path = tileset_header.coll()
tile_ids = parse.collision_tile_ids_list()[coll_path]
tile_ids = parse.tileset_collision_tile_ids_list()[coll_path]
yield f"uint8_t {coll_path}[] = {{"
yield " ".join(
f"{tile_ix},"

View File

@ -19,7 +19,7 @@ from generate.sort import default_sort
from generate.binary import binary_res, start_size_value
from generate.generate import renderer
from parse.map_header import Connection
from parse.map.headers import Connection
directions = sorted(['north', 'south', 'east', 'west'])
@ -42,7 +42,7 @@ def includes_header():
yield '#include "tilesets.hpp"'
yield ""
for map_header in sorted_map_headers():
block_path = parse.maps_blocks_list()[map_header.blocks()]
block_path = parse.map_blocks_list()[map_header.blocks()]
yield f'#include "../res/{block_path}.h"'
yield ""
@ -118,7 +118,7 @@ def text_pointers(map_header):
yield f".text_pointers = &{map_header.text_pointers()}[0],"
def map(map_header):
block_path = parse.maps_blocks_list()[map_header.blocks()]
block_path = parse.map_blocks_list()[map_header.blocks()]
map_constant = parse.map_constants_list()[map_header.name2]
return [
f"[map_t::{map_header.name2.lower()}] = {{",

View File

@ -14,7 +14,7 @@ from generate.binary import binary_res, start_size_value
from generate.generate import renderer
def sorted_sprite_constants_list():
return sorted(parse.sprite_constants_list().items(), key=default_sort)
return sorted(parse.spritesheet_constants_list().items(), key=default_sort)
def includes_header():
yield '#pragma once'
@ -25,8 +25,8 @@ def includes_header():
if name == 'SPRITE_NONE':
continue
assert index != 0, index
spritesheet = parse.spritesheets_list()[index - 1]
sprite_path = parse.gfx_sprites_list()[spritesheet.name]
spritesheet = parse.spritesheet_spritesheets_list()[index - 1]
sprite_path = parse.spritesheet_gfx_list()[spritesheet.name]
yield f'#include "../res/{sprite_path}.h"'
def includes_source():
@ -76,8 +76,8 @@ def sprite(name, index):
else:
# spritesheets_list does not include SPRITE_NONE at index 0
assert index != 0, index
spritesheet = parse.spritesheets_list()[index - 1]
sprite_path = parse.gfx_sprites_list()[spritesheet.name]
spritesheet = parse.spritesheet_spritesheets_list()[index - 1]
sprite_path = parse.spritesheet_gfx_list()[spritesheet.name]
sprite_count = spritesheet.sprite_count
return [
f"[spritesheet_t::{sprite_name(name)}] = {{",

View File

@ -16,8 +16,8 @@ def includes_header():
tileset_index = parse.tileset_constants_list()[tileset_name]
tileset_header = parse.tileset_headers_list()[tileset_index]
blockset_path = parse.gfx_tilesets_list()[tileset_header.blockset()]
gfx_path = parse.gfx_tilesets_list()[tileset_header.gfx()]
blockset_path = parse.tileset_gfx_list()[tileset_header.blockset()]
gfx_path = parse.tileset_gfx_list()[tileset_header.gfx()]
yield f'#include "../res/{blockset_path}.h"'
yield f'#include "../res/{gfx_path}.h"'
@ -55,8 +55,8 @@ def generate_header():
def blockset_tileset(name, index):
tileset_header = parse.tileset_headers_list()[index]
blockset_path = parse.gfx_tilesets_list()[tileset_header.blockset()]
gfx_path = parse.gfx_tilesets_list()[tileset_header.gfx()]
blockset_path = parse.tileset_gfx_list()[tileset_header.blockset()]
gfx_path = parse.tileset_gfx_list()[tileset_header.gfx()]
coll_path = tileset_header.coll()
return [

View File

@ -1,16 +1,18 @@
from functools import partial
from parse.generic import macro_line
tokenize_lines = partial(macro_line.tokenize_lines, prefix='const ')
from parse.generic import tokenize
def flatten(tokens):
index = 0
for t in tokens:
assert t[0] == 'const', t
yield t[1][0], index
_, (name,) = t
yield name, index
index += 1
def parse(prefix):
path = prefix / 'constants/tileset_constants.asm'
tokenize_lines = partial(tokenize.lines, prefix='const ')
def parse(prefix, path):
path = prefix / path
with open(path) as f:
return dict(flatten(tokenize_lines(f.read().split('\n'))))

View File

@ -0,0 +1,23 @@
def flatten(tokens, endings, base_path):
    """Attach each run of bare labels to the next data path seen.

    Tokens are ``(name,)`` for a bare label or ``(name, path)`` for a
    label that carries a file path.  Every label queued since the last
    path token is yielded paired with that path.

    Shared by:
    - parse.map.blocks
    - parse.map.objects
    - parse.tileset.gfx
    - parse.spritesheet.gfx
    """
    pending = []
    for token in tokens:
        if len(token) == 1:
            pending.append(token[0])
        elif len(token) == 2:
            label, path = token
            pending.append(label)
            for queued in pending:
                # every label must end in an expected suffix, and the
                # path must live under the expected base directory
                assert any(queued.endswith(suffix) for suffix in endings), (queued, endings)
                assert path.startswith(base_path), path
                yield queued, path
            pending = []
        else:
            assert False, token

View File

@ -1,11 +1,13 @@
def tokenize_params(params):
def params(params):
for param in params:
if '|' in param:
yield [p.strip() for p in param.split(' |')]
else:
yield param.strip()
def tokenize_line(line):
tokenize_params = params
def line(line):
line = line.split(';')[0].strip()
key_params = line.split(' ', maxsplit=1)
if len(key_params) == 1:
@ -15,8 +17,23 @@ def tokenize_line(line):
params = [p.strip() for p in params.split(',')]
return key, list(tokenize_params(params))
def tokenize_lines(lines, prefix=""):
tokenize_line = line
def lines(lines, prefix=""):
for line in filter(bool, lines):
line = line.strip()
if line.startswith(prefix):
yield tokenize_line(line)
def block(line, delim):
    # Tokenize an assembler label line split on `delim`:
    #   'Name:'                    -> ('Name',)
    #   'Name: INCBIN "a/path"'    -> ('Name', 'a/path')
    # A bare label (no delimiter) may carry a trailing ';' comment,
    # which is dropped.
    parts = line.split(delim)
    if len(parts) == 1:
        bare, = parts
        return (bare.split(';')[0].strip(),)
    label, tail = parts
    tail = tail.strip()
    if not tail:
        return (label,)
    _directive, quoted = tail.split(' ')
    return label, quoted.strip('"')

View File

@ -0,0 +1,5 @@
from parse.map import constants
from parse.map import headers
from parse.map import blocks
from parse.map import objects
from parse.map import hidden_objects

17
tools/parse/map/blocks.py Normal file
View File

@ -0,0 +1,17 @@
from parse.generic import tokenize
from parse.generic.flatten import flatten
def tokenize_lines(lines):
    # Turn every map-block label line ("Foo_Blocks: ...") into a token;
    # all other lines of maps.asm are ignored.
    for candidate in lines:
        if '_Blocks:' not in candidate:
            continue
        yield tokenize.block(candidate, delim=':')
def parse(prefix):
    """Map every *_Blocks label in maps.asm to its block-data path."""
    source = (prefix / 'maps.asm').read_text()
    pairs = list(flatten(tokenize_lines(source.split('\n')),
                         endings=['_Blocks'],
                         base_path='maps/'))
    mapping = dict(pairs)
    # a duplicate label would silently drop an entry in dict()
    assert len(mapping) == len(pairs)
    return mapping

View File

@ -1,9 +1,9 @@
from dataclasses import dataclass
from functools import partial
from parse.generic import macro_line
from parse.generic import tokenize
tokenize_lines = partial(macro_line.tokenize_lines, prefix="map_const")
tokenize_lines = partial(tokenize.lines, prefix="map_const")
@dataclass
class MapConstant:

View File

@ -1,6 +1,6 @@
from dataclasses import dataclass
from parse.generic import macro_line
from parse.generic import tokenize
# data/maps/headers/AgathasRoom.asm
@ -82,11 +82,9 @@ def flatten(tokens):
key=lambda c: c.name))
)
tokenize_lines = macro_line.tokenize_lines
def parse(path):
with open(path) as f:
tokens = list(tokenize_lines(f.read().split('\n')))
tokens = list(tokenize.lines(f.read().split('\n')))
return flatten(tokens)
def parse_all(prefix):

View File

@ -1,7 +1,7 @@
from dataclasses import dataclass
from parse import number
from parse.generic import macro_line
from parse.generic import number
from parse.generic import tokenize
def tokenize_label(line):
return line.split(':')[0].strip()
@ -11,9 +11,9 @@ def tokenize_lines(lines):
if line.strip().endswith(':'):
yield (tokenize_label(line),)
elif ('dw ' in line or 'db ' in line) and ' \\' not in line:
yield macro_line.tokenize_line(line)
yield tokenize.line(line)
elif 'hidden_object' in line or 'hidden_text_predef' in line:
yield macro_line.tokenize_line(line)
yield tokenize.line(line)
def flatten0(tokens):
stack = []

View File

@ -1,7 +1,8 @@
from dataclasses import dataclass
from parse import number
from parse.generic import macro_line
from parse.generic import number
from parse.generic import tokenize
from parse.generic.flatten import flatten
@dataclass
class ObjectEvent:
@ -36,7 +37,7 @@ def tokenize_label(line):
return ('label', line.split(':')[0].strip())
def tokenize_event(line):
return list(macro_line.tokenize_line(line))
return list(tokenize.line(line))
def tokenize_border_block(line):
return ('border_block', number.parse(line.strip().split()[1]))

View File

@ -1,43 +0,0 @@
def tokenize_block(line, delim):
    # Split an assembler label line on `delim`.
    # Returns (name,) for a bare label, or (name, path) when the label
    # is followed by an INCBIN-style directive with a quoted path.
    # (Removed in this commit; lives on as parse.generic.tokenize.block.)
    name_args = line.split(delim)
    if len(name_args) == 1:
        name, = name_args
        # bare label: drop any trailing ';' comment
        return (name.split(';')[0].strip(),)
    else:
        name, args = name_args
        if args.strip():
            # e.g. ' INCBIN "maps/Route1.blk"' -> 'maps/Route1.blk'
            _, path = args.strip().split(' ')
            return name, path.strip('"')
        else:
            return (name,)
def tokenize_lines(lines):
    # Tokenize only map-block label lines; everything else is skipped.
    for line in lines:
        if '_Blocks:' in line:
            yield tokenize_block(line, delim=':')
def flatten(tokens, endings, base_path):
    # Pair each run of bare labels with the next path token seen.
    # Consecutive (name,) tokens accumulate on `stack` until a
    # (name, path) token flushes them, all mapped to that path.
    stack = []
    for name_path in tokens:
        if len(name_path) == 2:
            name, path = name_path
            stack.append(name)
            for s_name in stack:
                # labels must carry an expected suffix; paths must live
                # under base_path
                assert any(s_name.endswith(e) for e in endings), (s_name, endings)
                assert path.startswith(base_path), path
                yield s_name, path
            stack = []
        elif len(name_path) == 1:
            stack.append(name_path[0])
        else:
            assert False, name_path
def parse(prefix):
    # Parse maps.asm into {label: block-file path}; asserts labels are
    # unique so dict() does not silently drop entries.
    with open(prefix / 'maps.asm') as f:
        tokens = tokenize_lines(f.read().split('\n'))
        l = list(flatten(tokens,
                         endings=['_Blocks'],
                         base_path='maps/'))
        d = dict(l)
        assert len(d) == len(l)
        return d

View File

@ -1,17 +1,4 @@
from functools import partial
from parse.generic import macro_line
from parse.generic import constants
tokenize_lines = partial(macro_line.tokenize_lines, prefix='const ')
def flatten(tokens):
index = 0
for t in tokens:
assert t[0] == 'const', t
_, (name,) = t
yield name, index
index += 1
def parse(prefix):
path = prefix / 'constants/move_constants.asm'
with open(path) as f:
return dict(flatten(tokenize_lines(f.read().split('\n'))))
# Bind the generic const-list parser to the move constant file.
# NOTE(review): the bare partial(...) expression was created and
# discarded, leaving this module with no `parse` callable; assigning it
# to `parse` matches the sibling tileset/spritesheet constants modules.
parse = partial(constants.parse, path='constants/move_constants.asm')

View File

@ -2,25 +2,25 @@ import sys
from pathlib import Path
from parse import map_header
from parse import maps_blocks
from parse import tileset_constants
from parse import tileset_headers
from parse import gfx_tilesets
from parse import collision_tile_ids
from parse import map_objects
from parse import hidden_objects
from parse import map_constants
from parse import map # constants
# headers
# blocks
# objects
# hidden_objects
from parse import tileset # constants
# headers
# gfx
# collision_tile_ids
from parse import spritesheet # constants
# spritesheets
# gfx
from parse import gfx_sprites
from parse import spritesheets
from parse import sprite_constants
from parse import text
from parse import scripts
prefix = Path(sys.argv[1])
def memoize(f):
value = None
def inner():
@ -30,29 +30,34 @@ def memoize(f):
return value
return inner
map_constants_list = memoize(lambda: map_constants.parse(prefix))
map_headers = memoize(lambda: map_header.parse_all(prefix))
maps_blocks_list = memoize(lambda: maps_blocks.parse(prefix))
map_objects_list = memoize(lambda: map_objects.parse_all(prefix))
prefix = Path(sys.argv[1])
tileset_constants_list = memoize(lambda: tileset_constants.parse(prefix))
tileset_headers_list = memoize(lambda: tileset_headers.parse(prefix))
gfx_tilesets_list = memoize(lambda: gfx_tilesets.parse(prefix))
collision_tile_ids_list = memoize(lambda: collision_tile_ids.parse(prefix))
hidden_objects_list = memoize(lambda: hidden_objects.parse(prefix))
# map
map_constants_list = memoize(lambda: map.constants.parse(prefix))
map_headers = memoize(lambda: map.headers.parse_all(prefix))
map_blocks_list = memoize(lambda: map.blocks.parse(prefix))
map_objects_list = memoize(lambda: map.objects.parse_all(prefix))
map_hidden_objects_list = memoize(lambda: map.hidden_objects.parse(prefix))
# tileset
tileset_constants_list = memoize(lambda: tileset.constants.parse(prefix))
tileset_headers_list = memoize(lambda: tileset.headers.parse(prefix))
tileset_gfx_list = memoize(lambda: tileset.gfx.parse(prefix))
tileset_collision_tile_ids_list = memoize(lambda: tileset.collision_tile_ids.parse(prefix))
# sprites
spritesheet_gfx_list = memoize(lambda: spritesheet.gfx.parse(prefix))
spritesheet_spritesheets_list = memoize(lambda: spritesheet.spritesheets.parse(prefix))
spritesheet_constants_list = memoize(lambda: spritesheet.constants.parse(prefix))
# text
scripts_list = memoize(lambda: scripts.parse_all(prefix))
text_list = memoize(lambda: text.parse_all(prefix))
# need:
#data/tilesets/pair_collision_tile_ids.asm
#cut_tree_blocks.asm
# home/vcopy: animations
# sprites
gfx_sprites_list = memoize(lambda: gfx_sprites.parse(prefix))
spritesheets_list = memoize(lambda: spritesheets.parse(prefix))
sprite_constants_list = memoize(lambda: sprite_constants.parse(prefix))
# text
scripts_list = memoize(lambda: scripts.parse_all(prefix))
text_list = memoize(lambda: text.parse_all(prefix))

79
tools/parse/pokemon.py Normal file
View File

@ -0,0 +1,79 @@
# gfx/pics.asm:
"""
SquirtlePicFront:: INCBIN "gfx/pokemon/front/squirtle.pic"
SquirtlePicBack:: INCBIN "gfx/pokemon/back/squirtleb.pic"
"""
# data/pokemon/base_stats/*.asm
"""
db DEX_TAUROS ; pokedex id
db 75, 100, 95, 110, 70
; hp atk def spd spc
db NORMAL, NORMAL ; type
db 45 ; catch rate
db 211 ; base exp
INCBIN "gfx/pokemon/front/tauros.pic", 0, 1 ; sprite dimensions
dw TaurosPicFront, TaurosPicBack
db TACKLE, NO_MOVE, NO_MOVE, NO_MOVE ; level 1 learnset
db GROWTH_SLOW ; growth rate
; tm/hm learnset
tmhm TOXIC, HORN_DRILL, BODY_SLAM, TAKE_DOWN, DOUBLE_EDGE, \
ICE_BEAM, BLIZZARD, HYPER_BEAM, RAGE, THUNDERBOLT, \
THUNDER, EARTHQUAKE, FISSURE, MIMIC, DOUBLE_TEAM, \
BIDE, FIRE_BLAST, SKULL_BASH, REST, SUBSTITUTE, \
STRENGTH
; end
db 0 ; padding
"""
# ./data/pokemon/evos_moves.asm
# ordered by pokemon_constants
"""
EvosMovesPointerTable:
table_width 2, EvosMovesPointerTable
dw RhydonEvosMoves
dw KangaskhanEvosMoves
dw NidoranMEvosMoves
dw ClefairyEvosMoves
....
OddishEvosMoves:
; Evolutions
db EV_LEVEL, 21, GLOOM
db 0
; Learnset
db 15, POISONPOWDER
db 17, STUN_SPORE
db 19, SLEEP_POWDER
db 24, ACID
db 33, PETAL_DANCE
db 46, SOLARBEAM
db 0
"""
# constants/pokemon_constants.asm
"""
const_def
const NO_MON ; $00
const RHYDON ; $01
const KANGASKHAN ; $02
const NIDORAN_M ; $03
"""
# data/pokemon/dex_entries.asm
# data/pokemon/dex_order.asm
# data/pokemon/names.asm
# data/wild/grass_water.asm
# WildDataPointers
# data/wild/maps/Route1.asm
# Route1WildMons

View File

@ -0,0 +1,4 @@
from functools import partial
from parse.generic import constants
# Bind the generic const-list parser to the pokemon constant file.
# NOTE(review): the bare partial(...) expression was created and
# discarded, leaving this module with no `parse` callable; assigning it
# to `parse` matches the sibling tileset/spritesheet constants modules.
parse = partial(constants.parse, path='constants/pokemon_constants.asm')

View File

@ -1,4 +1,4 @@
from parse.line import next_line, skip_whitespace
from parse.generic.line import next_line, skip_whitespace
def parse_dw_const(line):
dw_const, args = line.split(maxsplit=1)

View File

@ -1,6 +0,0 @@
from parse.tileset_constants import flatten, tokenize_lines
def parse(prefix):
path = prefix / 'constants/sprite_constants.asm'
with open(path) as f:
return dict(flatten(tokenize_lines(f.read().split('\n'))))

View File

@ -0,0 +1,3 @@
from parse.spritesheet import constants
from parse.spritesheet import spritesheets
from parse.spritesheet import gfx

View File

@ -0,0 +1,4 @@
from functools import partial
from parse.generic import constants
# Generic const-list parser bound to the sprite constant file; yields a
# {constant name: index} dict.
parse = partial(constants.parse, path='constants/sprite_constants.asm')

View File

@ -1,9 +1,10 @@
from parse.maps_blocks import tokenize_block, flatten
from parse.generic import tokenize
from parse.generic.flatten import flatten
def tokenize_lines(lines):
for line in lines:
if '::' in line:
yield tokenize_block(line, delim='::')
yield tokenize.block(line, delim='::')
def parse(prefix):
path = prefix / 'gfx/sprites.asm'

View File

@ -1,9 +1,9 @@
from dataclasses import dataclass
from functools import partial
from parse import number
from parse.generic import macro_line
from parse.generic import number
from parse.generic import tokenize
tokenize_lines = partial(macro_line.tokenize_lines, prefix='overworld_sprite')
tokenize_lines = partial(tokenize.lines, prefix='overworld_sprite')
@dataclass
class Spritesheet:

View File

@ -1,6 +1,6 @@
from itertools import chain
from parse.line import next_line, skip_whitespace
from parse.generic.line import next_line, skip_whitespace
def parse_label(lines):
lines, line = next_line(lines)

View File

@ -0,0 +1,4 @@
from parse.tileset import constants
from parse.tileset import headers
from parse.tileset import gfx
from parse.tileset import collision_tile_ids

View File

@ -1,15 +1,14 @@
from functools import partial
from parse.maps_blocks import tokenize_block
from parse import number
from parse.generic import macro_line
from parse.generic import tokenize
from parse.generic import number
def tokenize_lines(lines):
for line in lines:
if '_Coll:' in line:
yield tokenize_block(line, delim='::')
yield tokenize.block(line, delim='::')
elif 'coll_tiles' in line:
tokens, = tokenize_block(line, delim='::')
yield macro_line.tokenize_line(tokens)
tokens, = tokenize.block(line, delim='::')
yield tokenize.line(tokens)
def flatten(tokens):
stack = []

View File

@ -0,0 +1,4 @@
from functools import partial
from parse.generic import constants
# Generic const-list parser bound to the tileset constant file; yields a
# {constant name: index} dict.
parse = partial(constants.parse, path='constants/tileset_constants.asm')

View File

@ -1,9 +1,10 @@
from parse.maps_blocks import tokenize_block, flatten
from parse.generic import tokenize
from parse.generic.flatten import flatten
def tokenize_lines(lines):
for line in lines:
if '_GFX:' in line or '_Block:' in line:
yield tokenize_block(line, delim='::')
yield tokenize.block(line, delim='::')
def parse(prefix):
path = prefix / 'gfx/tilesets.asm'

View File

@ -1,8 +1,8 @@
from dataclasses import dataclass
from functools import partial
from parse import number
from parse.generic import macro_line
from parse.generic import number
from parse.generic import tokenize
@dataclass
class TilesetHeader:
@ -21,7 +21,7 @@ class TilesetHeader:
def coll(self):
return f"{self.name}_Coll"
tokenize_lines = partial(macro_line.tokenize_lines, prefix="tileset")
tokenize_lines = partial(tokenize.lines, prefix="tileset")
def flatten(tokens):
for ts in tokens: