diff --git a/tools/generate/collision_tile_ids.py b/tools/generate/collision_tile_ids.py index a1de555..e7e130a 100644 --- a/tools/generate/collision_tile_ids.py +++ b/tools/generate/collision_tile_ids.py @@ -11,7 +11,7 @@ def extern_collision_tile_ids(): for name, index in sorted_tilesets_constants_list(): tileset_header = parse.tileset_headers_list()[index] coll_path = tileset_header.coll() - tile_ids = parse.collision_tile_ids_list()[coll_path] + tile_ids = parse.tileset_collision_tile_ids_list()[coll_path] yield f"extern uint8_t {coll_path}[{len(tile_ids)}];" def generate_header(): @@ -23,7 +23,7 @@ def generate_header(): def collision_array(name, index): tileset_header = parse.tileset_headers_list()[index] coll_path = tileset_header.coll() - tile_ids = parse.collision_tile_ids_list()[coll_path] + tile_ids = parse.tileset_collision_tile_ids_list()[coll_path] yield f"uint8_t {coll_path}[] = {{" yield " ".join( f"{tile_ix}," diff --git a/tools/generate/maps.py b/tools/generate/maps.py index 5fb103b..04b6f18 100644 --- a/tools/generate/maps.py +++ b/tools/generate/maps.py @@ -19,7 +19,7 @@ from generate.sort import default_sort from generate.binary import binary_res, start_size_value from generate.generate import renderer -from parse.map_header import Connection +from parse.map.headers import Connection directions = sorted(['north', 'south', 'east', 'west']) @@ -42,7 +42,7 @@ def includes_header(): yield '#include "tilesets.hpp"' yield "" for map_header in sorted_map_headers(): - block_path = parse.maps_blocks_list()[map_header.blocks()] + block_path = parse.map_blocks_list()[map_header.blocks()] yield f'#include "../res/{block_path}.h"' yield "" @@ -118,7 +118,7 @@ def text_pointers(map_header): yield f".text_pointers = &{map_header.text_pointers()}[0]," def map(map_header): - block_path = parse.maps_blocks_list()[map_header.blocks()] + block_path = parse.map_blocks_list()[map_header.blocks()] map_constant = parse.map_constants_list()[map_header.name2] return [ 
f"[map_t::{map_header.name2.lower()}] = {{", diff --git a/tools/generate/sprites.py b/tools/generate/sprites.py index 6caf6a3..28f6b15 100644 --- a/tools/generate/sprites.py +++ b/tools/generate/sprites.py @@ -14,7 +14,7 @@ from generate.binary import binary_res, start_size_value from generate.generate import renderer def sorted_sprite_constants_list(): - return sorted(parse.sprite_constants_list().items(), key=default_sort) + return sorted(parse.spritesheet_constants_list().items(), key=default_sort) def includes_header(): yield '#pragma once' @@ -25,8 +25,8 @@ def includes_header(): if name == 'SPRITE_NONE': continue assert index != 0, index - spritesheet = parse.spritesheets_list()[index - 1] - sprite_path = parse.gfx_sprites_list()[spritesheet.name] + spritesheet = parse.spritesheet_spritesheets_list()[index - 1] + sprite_path = parse.spritesheet_gfx_list()[spritesheet.name] yield f'#include "../res/{sprite_path}.h"' def includes_source(): @@ -76,8 +76,8 @@ def sprite(name, index): else: # spritesheets_list does not include SPRITE_NULL at index 0 assert index != 0, index - spritesheet = parse.spritesheets_list()[index - 1] - sprite_path = parse.gfx_sprites_list()[spritesheet.name] + spritesheet = parse.spritesheet_spritesheets_list()[index - 1] + sprite_path = parse.spritesheet_gfx_list()[spritesheet.name] sprite_count = spritesheet.sprite_count return [ f"[spritesheet_t::{sprite_name(name)}] = {{", diff --git a/tools/generate/tilesets.py b/tools/generate/tilesets.py index 6cb9e3f..5c16bfc 100644 --- a/tools/generate/tilesets.py +++ b/tools/generate/tilesets.py @@ -16,8 +16,8 @@ def includes_header(): tileset_index = parse.tileset_constants_list()[tileset_name] tileset_header = parse.tileset_headers_list()[tileset_index] - blockset_path = parse.gfx_tilesets_list()[tileset_header.blockset()] - gfx_path = parse.gfx_tilesets_list()[tileset_header.gfx()] + blockset_path = parse.tileset_gfx_list()[tileset_header.blockset()] + gfx_path = 
parse.tileset_gfx_list()[tileset_header.gfx()] yield f'#include "../res/{blockset_path}.h"' yield f'#include "../res/{gfx_path}.h"' @@ -55,8 +55,8 @@ def generate_header(): def blockset_tileset(name, index): tileset_header = parse.tileset_headers_list()[index] - blockset_path = parse.gfx_tilesets_list()[tileset_header.blockset()] - gfx_path = parse.gfx_tilesets_list()[tileset_header.gfx()] + blockset_path = parse.tileset_gfx_list()[tileset_header.blockset()] + gfx_path = parse.tileset_gfx_list()[tileset_header.gfx()] coll_path = tileset_header.coll() return [ diff --git a/tools/parse/tileset_constants.py b/tools/parse/generic/constants.py similarity index 53% rename from tools/parse/tileset_constants.py rename to tools/parse/generic/constants.py index 825fbe2..a79b43b 100644 --- a/tools/parse/tileset_constants.py +++ b/tools/parse/generic/constants.py @@ -1,16 +1,18 @@ from functools import partial -from parse.generic import macro_line -tokenize_lines = partial(macro_line.tokenize_lines, prefix='const ') +from parse.generic import tokenize def flatten(tokens): index = 0 for t in tokens: assert t[0] == 'const', t - yield t[1][0], index + _, (name,) = t + yield name, index index += 1 -def parse(prefix): - path = prefix / 'constants/tileset_constants.asm' +tokenize_lines = partial(tokenize.lines, prefix='const ') + +def parse(prefix, path): + path = prefix / path with open(path) as f: return dict(flatten(tokenize_lines(f.read().split('\n')))) diff --git a/tools/parse/generic/flatten.py b/tools/parse/generic/flatten.py new file mode 100644 index 0000000..ced4165 --- /dev/null +++ b/tools/parse/generic/flatten.py @@ -0,0 +1,23 @@ +def flatten(tokens, endings, base_path): + """ + Used by + + - parse.map.blocks + - parse.map.objects + - parse.tileset.gfx + - parse.spritesheet.gfx + """ + stack = [] + for name_path in tokens: + if len(name_path) == 2: + name, path = name_path + stack.append(name) + for s_name in stack: + assert any(s_name.endswith(e) for e in endings), 
(s_name, endings) + assert path.startswith(base_path), path + yield s_name, path + stack = [] + elif len(name_path) == 1: + stack.append(name_path[0]) + else: + assert False, name_path diff --git a/tools/parse/line.py b/tools/parse/generic/line.py similarity index 100% rename from tools/parse/line.py rename to tools/parse/generic/line.py diff --git a/tools/parse/number.py b/tools/parse/generic/number.py similarity index 100% rename from tools/parse/number.py rename to tools/parse/generic/number.py diff --git a/tools/parse/generic/macro_line.py b/tools/parse/generic/tokenize.py similarity index 55% rename from tools/parse/generic/macro_line.py rename to tools/parse/generic/tokenize.py index 048c95d..9372728 100644 --- a/tools/parse/generic/macro_line.py +++ b/tools/parse/generic/tokenize.py @@ -1,11 +1,13 @@ -def tokenize_params(params): +def params(params): for param in params: if '|' in param: yield [p.strip() for p in param.split(' |')] else: yield param.strip() -def tokenize_line(line): +tokenize_params = params + +def line(line): line = line.split(';')[0].strip() key_params = line.split(' ', maxsplit=1) if len(key_params) == 1: @@ -15,8 +17,23 @@ def tokenize_line(line): params = [p.strip() for p in params.split(',')] return key, list(tokenize_params(params)) -def tokenize_lines(lines, prefix=""): +tokenize_line = line + +def lines(lines, prefix=""): for line in filter(bool, lines): line = line.strip() if line.startswith(prefix): yield tokenize_line(line) + +def block(line, delim): + name_args = line.split(delim) + if len(name_args) == 1: + name, = name_args + return (name.split(';')[0].strip(),) + else: + name, args = name_args + if args.strip(): + _, path = args.strip().split(' ') + return name, path.strip('"') + else: + return (name,) diff --git a/tools/parse/map/__init__.py b/tools/parse/map/__init__.py new file mode 100644 index 0000000..b00f0ab --- /dev/null +++ b/tools/parse/map/__init__.py @@ -0,0 +1,5 @@ +from parse.map import constants +from parse.map 
import headers +from parse.map import blocks +from parse.map import objects +from parse.map import hidden_objects diff --git a/tools/parse/map/blocks.py b/tools/parse/map/blocks.py new file mode 100644 index 0000000..528b4ff --- /dev/null +++ b/tools/parse/map/blocks.py @@ -0,0 +1,17 @@ +from parse.generic import tokenize +from parse.generic.flatten import flatten + +def tokenize_lines(lines): + for line in lines: + if '_Blocks:' in line: + yield tokenize.block(line, delim=':') + +def parse(prefix): + with open(prefix / 'maps.asm') as f: + tokens = tokenize_lines(f.read().split('\n')) + l = list(flatten(tokens, + endings=['_Blocks'], + base_path='maps/')) + d = dict(l) + assert len(d) == len(l) + return d diff --git a/tools/parse/map_constants.py b/tools/parse/map/constants.py similarity index 84% rename from tools/parse/map_constants.py rename to tools/parse/map/constants.py index 587a4e0..dd29514 100644 --- a/tools/parse/map_constants.py +++ b/tools/parse/map/constants.py @@ -1,9 +1,9 @@ from dataclasses import dataclass from functools import partial -from parse.generic import macro_line +from parse.generic import tokenize -tokenize_lines = partial(macro_line.tokenize_lines, prefix="map_const") +tokenize_lines = partial(tokenize.lines, prefix="map_const") @dataclass class MapConstant: diff --git a/tools/parse/map_header.py b/tools/parse/map/headers.py similarity index 94% rename from tools/parse/map_header.py rename to tools/parse/map/headers.py index d1063bf..1fe5ef3 100644 --- a/tools/parse/map_header.py +++ b/tools/parse/map/headers.py @@ -1,6 +1,6 @@ from dataclasses import dataclass -from parse.generic import macro_line +from parse.generic import tokenize # data/maps/headers/AgathasRoom.asm @@ -82,11 +82,9 @@ def flatten(tokens): key=lambda c: c.name)) ) -tokenize_lines = macro_line.tokenize_lines - def parse(path): with open(path) as f: - tokens = list(tokenize_lines(f.read().split('\n'))) + tokens = list(tokenize.lines(f.read().split('\n'))) return 
flatten(tokens) def parse_all(prefix): diff --git a/tools/parse/hidden_objects.py b/tools/parse/map/hidden_objects.py similarity index 93% rename from tools/parse/hidden_objects.py rename to tools/parse/map/hidden_objects.py index 65099eb..3b445c4 100644 --- a/tools/parse/hidden_objects.py +++ b/tools/parse/map/hidden_objects.py @@ -1,7 +1,7 @@ from dataclasses import dataclass -from parse import number -from parse.generic import macro_line +from parse.generic import number +from parse.generic import tokenize def tokenize_label(line): return line.split(':')[0].strip() @@ -11,9 +11,9 @@ def tokenize_lines(lines): if line.strip().endswith(':'): yield (tokenize_label(line),) elif ('dw ' in line or 'db ' in line) and ' \\' not in line: - yield macro_line.tokenize_line(line) + yield tokenize.line(line) elif 'hidden_object' in line or 'hidden_text_predef' in line: - yield macro_line.tokenize_line(line) + yield tokenize.line(line) def flatten0(tokens): stack = [] diff --git a/tools/parse/map_objects.py b/tools/parse/map/objects.py similarity index 95% rename from tools/parse/map_objects.py rename to tools/parse/map/objects.py index 6a15676..9d3ea03 100644 --- a/tools/parse/map_objects.py +++ b/tools/parse/map/objects.py @@ -1,7 +1,8 @@ from dataclasses import dataclass -from parse import number -from parse.generic import macro_line +from parse.generic import number +from parse.generic import tokenize +from parse.generic.flatten import flatten @dataclass class ObjectEvent: @@ -36,7 +37,7 @@ def tokenize_label(line): return ('label', line.split(':')[0].strip()) def tokenize_event(line): - return list(macro_line.tokenize_line(line)) + return list(tokenize.line(line)) def tokenize_border_block(line): return ('border_block', number.parse(line.strip().split()[1])) diff --git a/tools/parse/maps_blocks.py b/tools/parse/maps_blocks.py deleted file mode 100644 index e20529d..0000000 --- a/tools/parse/maps_blocks.py +++ /dev/null @@ -1,43 +0,0 @@ -def tokenize_block(line, delim): - 
name_args = line.split(delim) - if len(name_args) == 1: - name, = name_args - return (name.split(';')[0].strip(),) - else: - name, args = name_args - if args.strip(): - _, path = args.strip().split(' ') - return name, path.strip('"') - else: - return (name,) - -def tokenize_lines(lines): - for line in lines: - if '_Blocks:' in line: - yield tokenize_block(line, delim=':') - -def flatten(tokens, endings, base_path): - stack = [] - for name_path in tokens: - if len(name_path) == 2: - name, path = name_path - stack.append(name) - for s_name in stack: - assert any(s_name.endswith(e) for e in endings), (s_name, endings) - assert path.startswith(base_path), path - yield s_name, path - stack = [] - elif len(name_path) == 1: - stack.append(name_path[0]) - else: - assert False, name_path - -def parse(prefix): - with open(prefix / 'maps.asm') as f: - tokens = tokenize_lines(f.read().split('\n')) - l = list(flatten(tokens, - endings=['_Blocks'], - base_path='maps/')) - d = dict(l) - assert len(d) == len(l) - return d diff --git a/tools/parse/move/constants.py b/tools/parse/move/constants.py index cd64b2b..dc50ba5 100644 --- a/tools/parse/move/constants.py +++ b/tools/parse/move/constants.py @@ -1,17 +1,4 @@ from functools import partial -from parse.generic import macro_line +from parse.generic import constants -tokenize_lines = partial(macro_line.tokenize_lines, prefix='const ') - -def flatten(tokens): - index = 0 - for t in tokens: - assert t[0] == 'const', t - _, (name,) = t - yield name, index - index += 1 - -def parse(prefix): - path = prefix / 'constants/move_constants.asm' - with open(path) as f: - return dict(flatten(tokenize_lines(f.read().split('\n')))) +parse = partial(constants.parse, path='constants/move_constants.asm') diff --git a/tools/parse/parse.py b/tools/parse/parse.py index 2f3682f..9224498 100644 --- a/tools/parse/parse.py +++ b/tools/parse/parse.py @@ -2,25 +2,25 @@ import sys from pathlib import Path -from parse import map_header -from parse import maps_blocks 
-from parse import tileset_constants -from parse import tileset_headers -from parse import gfx_tilesets -from parse import collision_tile_ids -from parse import map_objects -from parse import hidden_objects -from parse import map_constants +from parse import map # constants + # headers + # blocks + # objects + # hidden_objects + +from parse import tileset # constants + # headers + # gfx + # collision_tile_ids + +from parse import spritesheet # constants + # spritesheets + # gfx -from parse import gfx_sprites -from parse import spritesheets -from parse import sprite_constants from parse import text from parse import scripts -prefix = Path(sys.argv[1]) - def memoize(f): value = None def inner(): @@ -30,29 +30,34 @@ def memoize(f): return value return inner -map_constants_list = memoize(lambda: map_constants.parse(prefix)) -map_headers = memoize(lambda: map_header.parse_all(prefix)) -maps_blocks_list = memoize(lambda: maps_blocks.parse(prefix)) -map_objects_list = memoize(lambda: map_objects.parse_all(prefix)) +prefix = Path(sys.argv[1]) -tileset_constants_list = memoize(lambda: tileset_constants.parse(prefix)) -tileset_headers_list = memoize(lambda: tileset_headers.parse(prefix)) -gfx_tilesets_list = memoize(lambda: gfx_tilesets.parse(prefix)) -collision_tile_ids_list = memoize(lambda: collision_tile_ids.parse(prefix)) -hidden_objects_list = memoize(lambda: hidden_objects.parse(prefix)) +# map +map_constants_list = memoize(lambda: map.constants.parse(prefix)) +map_headers = memoize(lambda: map.headers.parse_all(prefix)) +map_blocks_list = memoize(lambda: map.blocks.parse(prefix)) +map_objects_list = memoize(lambda: map.objects.parse_all(prefix)) +map_hidden_objects_list = memoize(lambda: map.hidden_objects.parse(prefix)) +# tileset +tileset_constants_list = memoize(lambda: tileset.constants.parse(prefix)) +tileset_headers_list = memoize(lambda: tileset.headers.parse(prefix)) +tileset_gfx_list = memoize(lambda: tileset.gfx.parse(prefix)) 
+tileset_collision_tile_ids_list = memoize(lambda: tileset.collision_tile_ids.parse(prefix)) + + + +# sprites +spritesheet_gfx_list = memoize(lambda: spritesheet.gfx.parse(prefix)) +spritesheet_spritesheets_list = memoize(lambda: spritesheet.spritesheets.parse(prefix)) +spritesheet_constants_list = memoize(lambda: spritesheet.constants.parse(prefix)) + +# text +scripts_list = memoize(lambda: scripts.parse_all(prefix)) +text_list = memoize(lambda: text.parse_all(prefix)) # need: #data/tilesets/pair_collision_tile_ids.asm #cut_tree_blocks.asm # home/vcopy: animations - -# sprites -gfx_sprites_list = memoize(lambda: gfx_sprites.parse(prefix)) -spritesheets_list = memoize(lambda: spritesheets.parse(prefix)) -sprite_constants_list = memoize(lambda: sprite_constants.parse(prefix)) - -# text -scripts_list = memoize(lambda: scripts.parse_all(prefix)) -text_list = memoize(lambda: text.parse_all(prefix)) diff --git a/tools/parse/pokemon.py b/tools/parse/pokemon.py new file mode 100644 index 0000000..dc123de --- /dev/null +++ b/tools/parse/pokemon.py @@ -0,0 +1,79 @@ +# gfx/pics.asm: +""" +SquirtlePicFront:: INCBIN "gfx/pokemon/front/squirtle.pic" +SquirtlePicBack:: INCBIN "gfx/pokemon/back/squirtleb.pic" +""" + +# data/pokemon/base_stats/*.asm +""" + db DEX_TAUROS ; pokedex id + + db 75, 100, 95, 110, 70 + ; hp atk def spd spc + + db NORMAL, NORMAL ; type + db 45 ; catch rate + db 211 ; base exp + + INCBIN "gfx/pokemon/front/tauros.pic", 0, 1 ; sprite dimensions + dw TaurosPicFront, TaurosPicBack + + db TACKLE, NO_MOVE, NO_MOVE, NO_MOVE ; level 1 learnset + db GROWTH_SLOW ; growth rate + + ; tm/hm learnset + tmhm TOXIC, HORN_DRILL, BODY_SLAM, TAKE_DOWN, DOUBLE_EDGE, \ + ICE_BEAM, BLIZZARD, HYPER_BEAM, RAGE, THUNDERBOLT, \ + THUNDER, EARTHQUAKE, FISSURE, MIMIC, DOUBLE_TEAM, \ + BIDE, FIRE_BLAST, SKULL_BASH, REST, SUBSTITUTE, \ + STRENGTH + ; end + + db 0 ; padding +""" + +# ./data/pokemon/evos_moves.asm +# ordered by pokemon_constants +""" +EvosMovesPointerTable: + 
table_width 2, EvosMovesPointerTable + dw RhydonEvosMoves + dw KangaskhanEvosMoves + dw NidoranMEvosMoves + dw ClefairyEvosMoves + +.... + +OddishEvosMoves: +; Evolutions + db EV_LEVEL, 21, GLOOM + db 0 +; Learnset + db 15, POISONPOWDER + db 17, STUN_SPORE + db 19, SLEEP_POWDER + db 24, ACID + db 33, PETAL_DANCE + db 46, SOLARBEAM + db 0 +""" + +# constants/pokemon_constants.asm +""" + const_def + const NO_MON ; $00 + const RHYDON ; $01 + const KANGASKHAN ; $02 + const NIDORAN_M ; $03 +""" + +# data/pokemon/dex_entries.asm + +# data/pokemon/dex_order.asm + +# data/pokemon/names.asm + +# data/wild/grass_water.asm +# WildDataPointers +# data/wild/maps/Route1.asm +# Route1WildMons diff --git a/tools/parse/pokemon/constants.py b/tools/parse/pokemon/constants.py new file mode 100644 index 0000000..dad42ab --- /dev/null +++ b/tools/parse/pokemon/constants.py @@ -0,0 +1,4 @@ +from functools import partial +from parse.generic import constants + +parse = partial(constants.parse, path='constants/pokemon_constants.asm') diff --git a/tools/parse/scripts.py b/tools/parse/scripts.py index 8111de7..707d4b7 100644 --- a/tools/parse/scripts.py +++ b/tools/parse/scripts.py @@ -1,4 +1,4 @@ -from parse.line import next_line, skip_whitespace +from parse.generic.line import next_line, skip_whitespace def parse_dw_const(line): dw_const, args = line.split(maxsplit=1) diff --git a/tools/parse/sprite_constants.py b/tools/parse/sprite_constants.py deleted file mode 100644 index ebd20cf..0000000 --- a/tools/parse/sprite_constants.py +++ /dev/null @@ -1,6 +0,0 @@ -from parse.tileset_constants import flatten, tokenize_lines - -def parse(prefix): - path = prefix / 'constants/sprite_constants.asm' - with open(path) as f: - return dict(flatten(tokenize_lines(f.read().split('\n')))) diff --git a/tools/parse/spritesheet/__init__.py b/tools/parse/spritesheet/__init__.py new file mode 100644 index 0000000..947304b --- /dev/null +++ b/tools/parse/spritesheet/__init__.py @@ -0,0 +1,3 @@ +from parse.spritesheet 
import constants +from parse.spritesheet import spritesheets +from parse.spritesheet import gfx diff --git a/tools/parse/spritesheet/constants.py b/tools/parse/spritesheet/constants.py new file mode 100644 index 0000000..3130d72 --- /dev/null +++ b/tools/parse/spritesheet/constants.py @@ -0,0 +1,4 @@ +from functools import partial +from parse.generic import constants + +parse = partial(constants.parse, path='constants/sprite_constants.asm') diff --git a/tools/parse/gfx_sprites.py b/tools/parse/spritesheet/gfx.py similarity index 75% rename from tools/parse/gfx_sprites.py rename to tools/parse/spritesheet/gfx.py index a2df4ae..c701413 100644 --- a/tools/parse/gfx_sprites.py +++ b/tools/parse/spritesheet/gfx.py @@ -1,9 +1,10 @@ -from parse.maps_blocks import tokenize_block, flatten +from parse.generic import tokenize +from parse.generic.flatten import flatten def tokenize_lines(lines): for line in lines: if '::' in line: - yield tokenize_block(line, delim='::') + yield tokenize.block(line, delim='::') def parse(prefix): path = prefix / 'gfx/sprites.asm' diff --git a/tools/parse/spritesheets.py b/tools/parse/spritesheet/spritesheets.py similarity index 85% rename from tools/parse/spritesheets.py rename to tools/parse/spritesheet/spritesheets.py index 6e7ddcf..889c3b7 100644 --- a/tools/parse/spritesheets.py +++ b/tools/parse/spritesheet/spritesheets.py @@ -1,9 +1,9 @@ from dataclasses import dataclass from functools import partial -from parse import number -from parse.generic import macro_line +from parse.generic import number +from parse.generic import tokenize -tokenize_lines = partial(macro_line.tokenize_lines, prefix='overworld_sprite') +tokenize_lines = partial(tokenize.lines, prefix='overworld_sprite') @dataclass class Spritesheet: diff --git a/tools/parse/text.py b/tools/parse/text.py index 2ae4747..737cf9e 100644 --- a/tools/parse/text.py +++ b/tools/parse/text.py @@ -1,6 +1,6 @@ from itertools import chain -from parse.line import next_line, skip_whitespace 
+from parse.generic.line import next_line, skip_whitespace def parse_label(lines): lines, line = next_line(lines) diff --git a/tools/parse/tileset/__init__.py b/tools/parse/tileset/__init__.py new file mode 100644 index 0000000..e3005d1 --- /dev/null +++ b/tools/parse/tileset/__init__.py @@ -0,0 +1,4 @@ +from parse.tileset import constants +from parse.tileset import headers +from parse.tileset import gfx +from parse.tileset import collision_tile_ids diff --git a/tools/parse/collision_tile_ids.py b/tools/parse/tileset/collision_tile_ids.py similarity index 73% rename from tools/parse/collision_tile_ids.py rename to tools/parse/tileset/collision_tile_ids.py index 00114e0..a19d0d3 100644 --- a/tools/parse/collision_tile_ids.py +++ b/tools/parse/tileset/collision_tile_ids.py @@ -1,15 +1,14 @@ from functools import partial -from parse.maps_blocks import tokenize_block -from parse import number -from parse.generic import macro_line +from parse.generic import tokenize +from parse.generic import number def tokenize_lines(lines): for line in lines: if '_Coll:' in line: - yield tokenize_block(line, delim='::') + yield tokenize.block(line, delim='::') elif 'coll_tiles' in line: - tokens, = tokenize_block(line, delim='::') - yield macro_line.tokenize_line(tokens) + tokens, = tokenize.block(line, delim='::') + yield tokenize.line(tokens) def flatten(tokens): stack = [] diff --git a/tools/parse/tileset/constants.py b/tools/parse/tileset/constants.py new file mode 100644 index 0000000..12ee389 --- /dev/null +++ b/tools/parse/tileset/constants.py @@ -0,0 +1,4 @@ +from functools import partial +from parse.generic import constants + +parse = partial(constants.parse, path='constants/tileset_constants.asm') diff --git a/tools/parse/gfx_tilesets.py b/tools/parse/tileset/gfx.py similarity index 77% rename from tools/parse/gfx_tilesets.py rename to tools/parse/tileset/gfx.py index 455df9a..1eeb82c 100644 --- a/tools/parse/gfx_tilesets.py +++ b/tools/parse/tileset/gfx.py @@ -1,9 +1,10 @@ 
-from parse.maps_blocks import tokenize_block, flatten +from parse.generic import tokenize +from parse.generic.flatten import flatten def tokenize_lines(lines): for line in lines: if '_GFX:' in line or '_Block:' in line: - yield tokenize_block(line, delim='::') + yield tokenize.block(line, delim='::') def parse(prefix): path = prefix / 'gfx/tilesets.asm' diff --git a/tools/parse/tileset_headers.py b/tools/parse/tileset/headers.py similarity index 87% rename from tools/parse/tileset_headers.py rename to tools/parse/tileset/headers.py index e8d9175..88dcf5e 100644 --- a/tools/parse/tileset_headers.py +++ b/tools/parse/tileset/headers.py @@ -1,8 +1,8 @@ from dataclasses import dataclass from functools import partial -from parse import number -from parse.generic import macro_line +from parse.generic import number +from parse.generic import tokenize @dataclass class TilesetHeader: @@ -21,7 +21,7 @@ class TilesetHeader: def coll(self): return f"{self.name}_Coll" -tokenize_lines = partial(macro_line.tokenize_lines, prefix="tileset") +tokenize_lines = partial(tokenize.lines, prefix="tileset") def flatten(tokens): for ts in tokens: