tools/parse: reorganize parsers
parent f4aad91349
commit ac43946728
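The flat parse.* modules are regrouped into map, tileset, and spritesheet packages; parse.generic.macro_line becomes parse.generic.tokenize; the shared label/path flatten helper moves to parse.generic.flatten; and the per-file constants parsers collapse into a generic parse.generic.constants.parse(prefix, path). A minimal sketch of the renamed accessors (names taken from this diff; whether it runs depends on a checkout path in sys.argv[1]):

import parse  # parse/__init__.py reads prefix = Path(sys.argv[1]) at import time

# old accessor                    -> new accessor (this commit)
# parse.maps_blocks_list()        -> parse.map_blocks_list()
# parse.gfx_tilesets_list()       -> parse.tileset_gfx_list()
# parse.collision_tile_ids_list() -> parse.tileset_collision_tile_ids_list()
# parse.gfx_sprites_list()        -> parse.spritesheet_gfx_list()
# parse.spritesheets_list()       -> parse.spritesheet_spritesheets_list()
# parse.sprite_constants_list()   -> parse.spritesheet_constants_list()
blocks = parse.map_blocks_list()  # label -> block path, parsed from maps.asm once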
@@ -11,7 +11,7 @@ def extern_collision_tile_ids():
     for name, index in sorted_tilesets_constants_list():
         tileset_header = parse.tileset_headers_list()[index]
         coll_path = tileset_header.coll()
-        tile_ids = parse.collision_tile_ids_list()[coll_path]
+        tile_ids = parse.tileset_collision_tile_ids_list()[coll_path]
         yield f"extern uint8_t {coll_path}[{len(tile_ids)}];"
 
 def generate_header():
@@ -23,7 +23,7 @@ def generate_header():
     def collision_array(name, index):
         tileset_header = parse.tileset_headers_list()[index]
         coll_path = tileset_header.coll()
-        tile_ids = parse.collision_tile_ids_list()[coll_path]
+        tile_ids = parse.tileset_collision_tile_ids_list()[coll_path]
         yield f"uint8_t {coll_path}[] = {{"
         yield " ".join(
             f"{tile_ix},"
@@ -19,7 +19,7 @@ from generate.sort import default_sort
 from generate.binary import binary_res, start_size_value
 from generate.generate import renderer
 
-from parse.map_header import Connection
+from parse.map.headers import Connection
 
 directions = sorted(['north', 'south', 'east', 'west'])
 
@@ -42,7 +42,7 @@ def includes_header():
     yield '#include "tilesets.hpp"'
     yield ""
     for map_header in sorted_map_headers():
-        block_path = parse.maps_blocks_list()[map_header.blocks()]
+        block_path = parse.map_blocks_list()[map_header.blocks()]
         yield f'#include "../res/{block_path}.h"'
     yield ""
 
@@ -118,7 +118,7 @@ def text_pointers(map_header):
     yield f".text_pointers = &{map_header.text_pointers()}[0],"
 
 def map(map_header):
-    block_path = parse.maps_blocks_list()[map_header.blocks()]
+    block_path = parse.map_blocks_list()[map_header.blocks()]
     map_constant = parse.map_constants_list()[map_header.name2]
     return [
         f"[map_t::{map_header.name2.lower()}] = {{",
@@ -14,7 +14,7 @@ from generate.binary import binary_res, start_size_value
 from generate.generate import renderer
 
 def sorted_sprite_constants_list():
-    return sorted(parse.sprite_constants_list().items(), key=default_sort)
+    return sorted(parse.spritesheet_constants_list().items(), key=default_sort)
 
 def includes_header():
     yield '#pragma once'
@@ -25,8 +25,8 @@ def includes_header():
         if name == 'SPRITE_NONE':
             continue
         assert index != 0, index
-        spritesheet = parse.spritesheets_list()[index - 1]
-        sprite_path = parse.gfx_sprites_list()[spritesheet.name]
+        spritesheet = parse.spritesheet_spritesheets_list()[index - 1]
+        sprite_path = parse.spritesheet_gfx_list()[spritesheet.name]
         yield f'#include "../res/{sprite_path}.h"'
 
 def includes_source():
@@ -76,8 +76,8 @@ def sprite(name, index):
     else:
         # spritesheets_list does not include SPRITE_NULL at index 0
        assert index != 0, index
-        spritesheet = parse.spritesheets_list()[index - 1]
-        sprite_path = parse.gfx_sprites_list()[spritesheet.name]
+        spritesheet = parse.spritesheet_spritesheets_list()[index - 1]
+        sprite_path = parse.spritesheet_gfx_list()[spritesheet.name]
         sprite_count = spritesheet.sprite_count
     return [
         f"[spritesheet_t::{sprite_name(name)}] = {{",
@@ -16,8 +16,8 @@ def includes_header():
     tileset_index = parse.tileset_constants_list()[tileset_name]
     tileset_header = parse.tileset_headers_list()[tileset_index]
 
-    blockset_path = parse.gfx_tilesets_list()[tileset_header.blockset()]
-    gfx_path = parse.gfx_tilesets_list()[tileset_header.gfx()]
+    blockset_path = parse.tileset_gfx_list()[tileset_header.blockset()]
+    gfx_path = parse.tileset_gfx_list()[tileset_header.gfx()]
 
     yield f'#include "../res/{blockset_path}.h"'
     yield f'#include "../res/{gfx_path}.h"'
@@ -55,8 +55,8 @@ def generate_header():
     def blockset_tileset(name, index):
         tileset_header = parse.tileset_headers_list()[index]
 
-        blockset_path = parse.gfx_tilesets_list()[tileset_header.blockset()]
-        gfx_path = parse.gfx_tilesets_list()[tileset_header.gfx()]
+        blockset_path = parse.tileset_gfx_list()[tileset_header.blockset()]
+        gfx_path = parse.tileset_gfx_list()[tileset_header.gfx()]
         coll_path = tileset_header.coll()
 
         return [
@@ -1,16 +1,18 @@
 from functools import partial
-from parse.generic import macro_line
 
-tokenize_lines = partial(macro_line.tokenize_lines, prefix='const ')
+from parse.generic import tokenize
 
 def flatten(tokens):
     index = 0
     for t in tokens:
         assert t[0] == 'const', t
-        yield t[1][0], index
+        _, (name,) = t
+        yield name, index
         index += 1
 
-def parse(prefix):
-    path = prefix / 'constants/tileset_constants.asm'
+tokenize_lines = partial(tokenize.lines, prefix='const ')
+
+def parse(prefix, path):
+    path = prefix / path
     with open(path) as f:
         return dict(flatten(tokenize_lines(f.read().split('\n'))))
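Assuming this hunk is the new parse/generic/constants.py, the tokenizing core behaves as before, just parameterized by path; a hedged worked example (input lines shaped like the game's constants files, values illustrative):

from parse.generic import constants

lines = [
    '\tconst_def',               # filtered out: no 'const ' prefix after strip
    '\tconst OVERWORLD     ; 0',
    '\tconst REDS_HOUSE_1  ; 1',
]
tokens = constants.tokenize_lines(lines)  # ('const', ['OVERWORLD']), ...
print(dict(constants.flatten(tokens)))    # {'OVERWORLD': 0, 'REDS_HOUSE_1': 1}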
tools/parse/generic/flatten.py (new file, 23 lines)
@@ -0,0 +1,23 @@
+def flatten(tokens, endings, base_path):
+    """
+    Used by
+
+    - parse.map.blocks
+    - parse.map.objects
+    - parse.tileset.gfx
+    - parse.spritesheet.gfx
+    """
+    stack = []
+    for name_path in tokens:
+        if len(name_path) == 2:
+            name, path = name_path
+            stack.append(name)
+            for s_name in stack:
+                assert any(s_name.endswith(e) for e in endings), (s_name, endings)
+                assert path.startswith(base_path), path
+                yield s_name, path
+            stack = []
+        elif len(name_path) == 1:
+            stack.append(name_path[0])
+        else:
+            assert False, name_path
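Stacked labels that share one INCBIN path all map to that path; a small hedged example (token shapes as produced by tokenize.block, label names hypothetical):

from parse.generic.flatten import flatten

tokens = [
    ('Route1_Blocks',),                        # label line with no path
    ('Route1Copy_Blocks', 'maps/route1.blk'),  # label + INCBIN path
]
print(list(flatten(tokens, endings=['_Blocks'], base_path='maps/')))
# [('Route1_Blocks', 'maps/route1.blk'), ('Route1Copy_Blocks', 'maps/route1.blk')]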
@@ -1,11 +1,13 @@
-def tokenize_params(params):
+def params(params):
     for param in params:
         if '|' in param:
             yield [p.strip() for p in param.split(' |')]
         else:
             yield param.strip()
 
-def tokenize_line(line):
+tokenize_params = params
+
+def line(line):
     line = line.split(';')[0].strip()
     key_params = line.split(' ', maxsplit=1)
     if len(key_params) == 1:
@@ -15,8 +17,23 @@ def tokenize_line(line):
         params = [p.strip() for p in params.split(',')]
     return key, list(tokenize_params(params))
 
-def tokenize_lines(lines, prefix=""):
+tokenize_line = line
+
+def lines(lines, prefix=""):
     for line in filter(bool, lines):
         line = line.strip()
         if line.startswith(prefix):
             yield tokenize_line(line)
+
+def block(line, delim):
+    name_args = line.split(delim)
+    if len(name_args) == 1:
+        name, = name_args
+        return (name.split(';')[0].strip(),)
+    else:
+        name, args = name_args
+        if args.strip():
+            _, path = args.strip().split(' ')
+            return name, path.strip('"')
+        else:
+            return (name,)
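tokenize.block splits a label line from an optional INCBIN path; hedged examples (input lines are illustrative, not from the repository):

from parse.generic import tokenize

print(tokenize.block('Route1_Blocks: INCBIN "maps/route1.blk"', delim=':'))
# ('Route1_Blocks', 'maps/route1.blk')
print(tokenize.block('Route2_Blocks:', delim=':'))  # bare label, empty args
# ('Route2_Blocks',)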
tools/parse/map/__init__.py (new file, 5 lines)
@@ -0,0 +1,5 @@
+from parse.map import constants
+from parse.map import headers
+from parse.map import blocks
+from parse.map import objects
+from parse.map import hidden_objects
tools/parse/map/blocks.py (new file, 17 lines)
@@ -0,0 +1,17 @@
+from parse.generic import tokenize
+from parse.generic.flatten import flatten
+
+def tokenize_lines(lines):
+    for line in lines:
+        if '_Blocks:' in line:
+            yield tokenize.block(line, delim=':')
+
+def parse(prefix):
+    with open(prefix / 'maps.asm') as f:
+        tokens = tokenize_lines(f.read().split('\n'))
+        l = list(flatten(tokens,
+                         endings=['_Blocks'],
+                         base_path='maps/'))
+        d = dict(l)
+        assert len(d) == len(l)
+        return d
@@ -1,9 +1,9 @@
 from dataclasses import dataclass
 from functools import partial
 
-from parse.generic import macro_line
+from parse.generic import tokenize
 
-tokenize_lines = partial(macro_line.tokenize_lines, prefix="map_const")
+tokenize_lines = partial(tokenize.lines, prefix="map_const")
 
 @dataclass
 class MapConstant:
@@ -1,6 +1,6 @@
 from dataclasses import dataclass
 
-from parse.generic import macro_line
+from parse.generic import tokenize
 
 # data/maps/headers/AgathasRoom.asm
 
@@ -82,11 +82,9 @@ def flatten(tokens):
                       key=lambda c: c.name))
     )
 
-tokenize_lines = macro_line.tokenize_lines
-
 def parse(path):
     with open(path) as f:
-        tokens = list(tokenize_lines(f.read().split('\n')))
+        tokens = list(tokenize.lines(f.read().split('\n')))
     return flatten(tokens)
 
 def parse_all(prefix):
@@ -1,7 +1,7 @@
 from dataclasses import dataclass
 
-from parse import number
-from parse.generic import macro_line
+from parse.generic import number
+from parse.generic import tokenize
 
 def tokenize_label(line):
     return line.split(':')[0].strip()
@@ -11,9 +11,9 @@ def tokenize_lines(lines):
         if line.strip().endswith(':'):
             yield (tokenize_label(line),)
         elif ('dw ' in line or 'db ' in line) and ' \\' not in line:
-            yield macro_line.tokenize_line(line)
+            yield tokenize.line(line)
         elif 'hidden_object' in line or 'hidden_text_predef' in line:
-            yield macro_line.tokenize_line(line)
+            yield tokenize.line(line)
 
 def flatten0(tokens):
     stack = []
@@ -1,7 +1,8 @@
 from dataclasses import dataclass
 
-from parse import number
-from parse.generic import macro_line
+from parse.generic import number
+from parse.generic import tokenize
+from parse.generic.flatten import flatten
 
 @dataclass
 class ObjectEvent:
@@ -36,7 +37,7 @@ def tokenize_label(line):
     return ('label', line.split(':')[0].strip())
 
 def tokenize_event(line):
-    return list(macro_line.tokenize_line(line))
+    return list(tokenize.line(line))
 
 def tokenize_border_block(line):
     return ('border_block', number.parse(line.strip().split()[1]))
@@ -1,43 +0,0 @@
-def tokenize_block(line, delim):
-    name_args = line.split(delim)
-    if len(name_args) == 1:
-        name, = name_args
-        return (name.split(';')[0].strip(),)
-    else:
-        name, args = name_args
-        if args.strip():
-            _, path = args.strip().split(' ')
-            return name, path.strip('"')
-        else:
-            return (name,)
-
-def tokenize_lines(lines):
-    for line in lines:
-        if '_Blocks:' in line:
-            yield tokenize_block(line, delim=':')
-
-def flatten(tokens, endings, base_path):
-    stack = []
-    for name_path in tokens:
-        if len(name_path) == 2:
-            name, path = name_path
-            stack.append(name)
-            for s_name in stack:
-                assert any(s_name.endswith(e) for e in endings), (s_name, endings)
-                assert path.startswith(base_path), path
-                yield s_name, path
-            stack = []
-        elif len(name_path) == 1:
-            stack.append(name_path[0])
-        else:
-            assert False, name_path
-
-def parse(prefix):
-    with open(prefix / 'maps.asm') as f:
-        tokens = tokenize_lines(f.read().split('\n'))
-        l = list(flatten(tokens,
-                         endings=['_Blocks'],
-                         base_path='maps/'))
-        d = dict(l)
-        assert len(d) == len(l)
-        return d
@@ -1,17 +1,4 @@
 from functools import partial
-from parse.generic import macro_line
+from parse.generic import constants
 
-tokenize_lines = partial(macro_line.tokenize_lines, prefix='const ')
-
-def flatten(tokens):
-    index = 0
-    for t in tokens:
-        assert t[0] == 'const', t
-        _, (name,) = t
-        yield name, index
-        index += 1
-
-def parse(prefix):
-    path = prefix / 'constants/move_constants.asm'
-    with open(path) as f:
-        return dict(flatten(tokenize_lines(f.read().split('\n'))))
+parse = partial(constants.parse, path='constants/move_constants.asm')
@@ -2,25 +2,25 @@ import sys
 
 from pathlib import Path
 
-from parse import map_header
-from parse import maps_blocks
-from parse import tileset_constants
-from parse import tileset_headers
-from parse import gfx_tilesets
-from parse import collision_tile_ids
-from parse import map_objects
-from parse import hidden_objects
-from parse import map_constants
-
-from parse import gfx_sprites
-from parse import spritesheets
-from parse import sprite_constants
+from parse import map          # constants
+                               # headers
+                               # blocks
+                               # objects
+                               # hidden_objects
+
+from parse import tileset      # constants
+                               # headers
+                               # gfx
+                               # collision_tile_ids
+
+from parse import spritesheet  # constants
+                               # spritesheets
+                               # gfx
 
 from parse import text
 from parse import scripts
 
-prefix = Path(sys.argv[1])
 
 def memoize(f):
     value = None
     def inner():
@@ -30,29 +30,34 @@ def memoize(f):
         return value
     return inner
 
-map_constants_list = memoize(lambda: map_constants.parse(prefix))
-map_headers = memoize(lambda: map_header.parse_all(prefix))
-maps_blocks_list = memoize(lambda: maps_blocks.parse(prefix))
-map_objects_list = memoize(lambda: map_objects.parse_all(prefix))
+prefix = Path(sys.argv[1])
 
-tileset_constants_list = memoize(lambda: tileset_constants.parse(prefix))
-tileset_headers_list = memoize(lambda: tileset_headers.parse(prefix))
-gfx_tilesets_list = memoize(lambda: gfx_tilesets.parse(prefix))
-collision_tile_ids_list = memoize(lambda: collision_tile_ids.parse(prefix))
-hidden_objects_list = memoize(lambda: hidden_objects.parse(prefix))
+# map
+map_constants_list = memoize(lambda: map.constants.parse(prefix))
+map_headers = memoize(lambda: map.headers.parse_all(prefix))
+map_blocks_list = memoize(lambda: map.blocks.parse(prefix))
+map_objects_list = memoize(lambda: map.objects.parse_all(prefix))
+map_hidden_objects_list = memoize(lambda: map.hidden_objects.parse(prefix))
+
+# tileset
+tileset_constants_list = memoize(lambda: tileset.constants.parse(prefix))
+tileset_headers_list = memoize(lambda: tileset.headers.parse(prefix))
+tileset_gfx_list = memoize(lambda: tileset.gfx.parse(prefix))
+tileset_collision_tile_ids_list = memoize(lambda: tileset.collision_tile_ids.parse(prefix))
+
+# sprites
+spritesheet_gfx_list = memoize(lambda: spritesheet.gfx.parse(prefix))
+spritesheet_spritesheets_list = memoize(lambda: spritesheet.spritesheets.parse(prefix))
+spritesheet_constants_list = memoize(lambda: spritesheet.constants.parse(prefix))
+
+# text
+scripts_list = memoize(lambda: scripts.parse_all(prefix))
+text_list = memoize(lambda: text.parse_all(prefix))
 
 # need:
 #data/tilesets/pair_collision_tile_ids.asm
 #cut_tree_blocks.asm
 
 # home/vcopy: animations
-
-# sprites
-gfx_sprites_list = memoize(lambda: gfx_sprites.parse(prefix))
-spritesheets_list = memoize(lambda: spritesheets.parse(prefix))
-sprite_constants_list = memoize(lambda: sprite_constants.parse(prefix))
-
-# text
-scripts_list = memoize(lambda: scripts.parse_all(prefix))
-text_list = memoize(lambda: text.parse_all(prefix))
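Each *_list accessor wraps its parser in memoize, so an asm file is read at most once per run; a hedged sketch of the caching behavior (requires a checkout path in sys.argv[1]):

import parse

a = parse.tileset_constants_list()  # first call parses constants/tileset_constants.asm
b = parse.tileset_constants_list()  # second call returns the cached dict
assert a is b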
tools/parse/pokemon.py (new file, 79 lines)
@@ -0,0 +1,79 @@
+# gfx/pics.asm:
+"""
+SquirtlePicFront:: INCBIN "gfx/pokemon/front/squirtle.pic"
+SquirtlePicBack::  INCBIN "gfx/pokemon/back/squirtleb.pic"
+"""
+
+# data/pokemon/base_stats/*.asm
+"""
+db DEX_TAUROS ; pokedex id
+
+db 75, 100, 95, 110, 70
+;  hp atk def spd spc
+
+db NORMAL, NORMAL ; type
+db 45 ; catch rate
+db 211 ; base exp
+
+INCBIN "gfx/pokemon/front/tauros.pic", 0, 1 ; sprite dimensions
+dw TaurosPicFront, TaurosPicBack
+
+db TACKLE, NO_MOVE, NO_MOVE, NO_MOVE ; level 1 learnset
+db GROWTH_SLOW ; growth rate
+
+; tm/hm learnset
+tmhm TOXIC, HORN_DRILL, BODY_SLAM, TAKE_DOWN, DOUBLE_EDGE, \
+     ICE_BEAM, BLIZZARD, HYPER_BEAM, RAGE, THUNDERBOLT, \
+     THUNDER, EARTHQUAKE, FISSURE, MIMIC, DOUBLE_TEAM, \
+     BIDE, FIRE_BLAST, SKULL_BASH, REST, SUBSTITUTE, \
+     STRENGTH
+; end
+
+db 0 ; padding
+"""
+
+# ./data/pokemon/evos_moves.asm
+# ordered by pokemon_constants
+"""
+EvosMovesPointerTable:
+	table_width 2, EvosMovesPointerTable
+	dw RhydonEvosMoves
+	dw KangaskhanEvosMoves
+	dw NidoranMEvosMoves
+	dw ClefairyEvosMoves
+
+....
+
+OddishEvosMoves:
+; Evolutions
+	db EV_LEVEL, 21, GLOOM
+	db 0
+; Learnset
+	db 15, POISONPOWDER
+	db 17, STUN_SPORE
+	db 19, SLEEP_POWDER
+	db 24, ACID
+	db 33, PETAL_DANCE
+	db 46, SOLARBEAM
+	db 0
+"""
+
+# constants/pokemon_constants.asm
+"""
+	const_def
+	const NO_MON      ; $00
+	const RHYDON      ; $01
+	const KANGASKHAN  ; $02
+	const NIDORAN_M   ; $03
+"""
+
+# data/pokemon/dex_entries.asm
+
+# data/pokemon/dex_order.asm
+
+# data/pokemon/names.asm
+
+# data/wild/grass_water.asm
+# WildDataPointers
+# data/wild/maps/Route1.asm
+# Route1WildMons
tools/parse/pokemon/constants.py (new file, 4 lines)
@@ -0,0 +1,4 @@
+from functools import partial
+from parse.generic import constants
+
+parse = partial(constants.parse, path='constants/pokemon_constants.asm')
@@ -1,4 +1,4 @@
-from parse.line import next_line, skip_whitespace
+from parse.generic.line import next_line, skip_whitespace
 
 def parse_dw_const(line):
     dw_const, args = line.split(maxsplit=1)
@@ -1,6 +0,0 @@
-from parse.tileset_constants import flatten, tokenize_lines
-
-def parse(prefix):
-    path = prefix / 'constants/sprite_constants.asm'
-    with open(path) as f:
-        return dict(flatten(tokenize_lines(f.read().split('\n'))))
tools/parse/spritesheet/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
+from parse.spritesheet import constants
+from parse.spritesheet import spritesheets
+from parse.spritesheet import gfx
tools/parse/spritesheet/constants.py (new file, 4 lines)
@@ -0,0 +1,4 @@
+from functools import partial
+from parse.generic import constants
+
+parse = partial(constants.parse, path='constants/sprite_constants.asm')
@@ -1,9 +1,10 @@
-from parse.maps_blocks import tokenize_block, flatten
+from parse.generic import tokenize
+from parse.generic.flatten import flatten
 
 def tokenize_lines(lines):
     for line in lines:
         if '::' in line:
-            yield tokenize_block(line, delim='::')
+            yield tokenize.block(line, delim='::')
 
 def parse(prefix):
     path = prefix / 'gfx/sprites.asm'
@@ -1,9 +1,9 @@
 from dataclasses import dataclass
 from functools import partial
 
-from parse import number
-from parse.generic import macro_line
+from parse.generic import number
+from parse.generic import tokenize
 
-tokenize_lines = partial(macro_line.tokenize_lines, prefix='overworld_sprite')
+tokenize_lines = partial(tokenize.lines, prefix='overworld_sprite')
 
 @dataclass
 class Spritesheet:
@@ -1,6 +1,6 @@
 from itertools import chain
 
-from parse.line import next_line, skip_whitespace
+from parse.generic.line import next_line, skip_whitespace
 
 def parse_label(lines):
     lines, line = next_line(lines)
tools/parse/tileset/__init__.py (new file, 4 lines)
@@ -0,0 +1,4 @@
+from parse.tileset import constants
+from parse.tileset import headers
+from parse.tileset import gfx
+from parse.tileset import collision_tile_ids
@@ -1,15 +1,14 @@
 from functools import partial
-from parse.maps_blocks import tokenize_block
-from parse import number
-from parse.generic import macro_line
+from parse.generic import tokenize
+from parse.generic import number
 
 def tokenize_lines(lines):
     for line in lines:
         if '_Coll:' in line:
-            yield tokenize_block(line, delim='::')
+            yield tokenize.block(line, delim='::')
         elif 'coll_tiles' in line:
-            tokens, = tokenize_block(line, delim='::')
-            yield macro_line.tokenize_line(tokens)
+            tokens, = tokenize.block(line, delim='::')
+            yield tokenize.line(tokens)
 
 def flatten(tokens):
     stack = []
tools/parse/tileset/constants.py (new file, 4 lines)
@@ -0,0 +1,4 @@
+from functools import partial
+from parse.generic import constants
+
+parse = partial(constants.parse, path='constants/tileset_constants.asm')
@@ -1,9 +1,10 @@
-from parse.maps_blocks import tokenize_block, flatten
+from parse.generic import tokenize
+from parse.generic.flatten import flatten
 
 def tokenize_lines(lines):
     for line in lines:
         if '_GFX:' in line or '_Block:' in line:
-            yield tokenize_block(line, delim='::')
+            yield tokenize.block(line, delim='::')
 
 def parse(prefix):
     path = prefix / 'gfx/tilesets.asm'
@@ -1,8 +1,8 @@
 from dataclasses import dataclass
 from functools import partial
 
-from parse import number
-from parse.generic import macro_line
+from parse.generic import number
+from parse.generic import tokenize
 
 @dataclass
 class TilesetHeader:
@@ -21,7 +21,7 @@ class TilesetHeader:
     def coll(self):
         return f"{self.name}_Coll"
 
-tokenize_lines = partial(macro_line.tokenize_lines, prefix="tileset")
+tokenize_lines = partial(tokenize.lines, prefix="tileset")
 
 def flatten(tokens):
     for ts in tokens: