add font and text data
The original font is a bit of a mess and includes many duplicate characters. I decided to reorganize the characters into a new set of glyphs in derived/font.png. This commit also includes very basic parsing for text data.
parent 59bd2d8903
commit a56def6074
Makefile (1 line changed)

@@ -18,6 +18,7 @@ SRC += main.cpp
 SRC += input.cpp
 SRC += vram.cpp
 SRC += font.cpp
+SRC += graphic.cpp

 DEP = $(patsubst %.cpp,%.cpp.d,$(SRC))
control.hpp (new file, 47 lines)

#pragma once

#include <stdint.h>

struct control_t {
  static constexpr uint8_t next = 0x80;
  static constexpr uint8_t line = 0x81;
  static constexpr uint8_t para = 0x82;
  static constexpr uint8_t cont = 0x83;
  static constexpr uint8_t done = 0x84;
  static constexpr uint8_t prompt = 0x85;
  static constexpr uint8_t page = 0x86;
};

struct ligatures_t {
  static constexpr uint8_t tm = 0x90;
  static constexpr uint8_t pc = 0x91;
  static constexpr uint8_t trainer = 0x92;
  static constexpr uint8_t rocket = 0x93;
  static constexpr uint8_t poke = 0x94;
  static constexpr uint8_t double_ellipsis = 0x95;
  static constexpr uint8_t pkmn = 0x96;
};

struct extended_t {
  static constexpr uint8_t jpy = 0xa5;      // ¥
  static constexpr uint8_t e = 0xe9;        // é
  static constexpr uint8_t ellipsis = 0xa8; // …
  static constexpr uint8_t pk = 0xb2;       // ᴾₖ
  static constexpr uint8_t mn = 0xb3;       // ᴹₙ
  static constexpr uint8_t times = 0xd7;    // ×
};

/*
PCCharText:: db "PC@"
TMCharText:: db "TM@"
TrainerCharText:: db "TRAINER@"
RocketCharText:: db "ROCKET@"
PlacePOKeText:: db "POKé@"
SixDotsCharText:: db "……@"
PlacePKMNText:: db "<PK><MN>@"

dict "<PLAYER>", PrintPlayerName
dict "<RIVAL>", PrintRivalName
dict "<TARGET>", PlaceMoveTargetsName
dict "<USER>", PlaceMoveUsersName
*/
derived/font.png (binary file not shown; 5.0 KiB → 5.1 KiB)
font.cpp (36 lines changed)

@@ -4,6 +4,7 @@

 #include "res/font.2bpp.h"
 #include "font.hpp"
+#include "control.hpp"

 constexpr inline uint8_t ascii_to_font(uint8_t c)
 {
@@ -15,18 +16,29 @@ constexpr inline uint8_t ascii_to_font(uint8_t c)
     return (c - '0') + 64;

   switch (c) {
-  case '!': return 26;
-  case '(': return 27;
-  case ')': return 28;
-  case ',': return 29;
-  case '-': return 30;
-  case '.': return 31;
-  case '/': return 58;
-  case ':': return 59;
-  case ';': return 60;
-  case '?': return 61;
-  case '[': return 62;
-  case ']': return 63;
+  case '!': return 0x1a;
+  case '(': return 0x1b;
+  case ')': return 0x1c;
+  case ',': return 0x1d;
+  case '-': return 0x1e;
+  case '.': return 0x1f;
+  case '/': return 0x3a;
+  case ':': return 0x3b;
+  case ';': return 0x3c;
+  case '?': return 0x3d;
+  case '[': return 0x3e;
+  case ']': return 0x3f;
+  case '`': return 0x5c;  // ‘
+  case '\'': return 0x5d; // ’
+  case '^': return 0x5e;  // “
+  case '"': return 0x5f;  // ”
+  case extended_t::e: return 0x4a;        // é
+  case extended_t::ellipsis: return 0x65; // …
+  case extended_t::pk: return 0x72;       // ᴾₖ
+  case extended_t::mn: return 0x73;       // ᴹₙ
+  case extended_t::jpy: return 0x78;      // ¥
+  case extended_t::times: return 0x79;    // ×
+  default: return 0x7e; // "invalid" character
   }
 }
graphic.cpp (new file, 70 lines)

#include <cstdint>

#include "vdp2.h"

#include "coordinates.hpp"
#include "graphic.hpp"

#include "render_map.hpp"

struct dialog_border {
  static constexpr uint8_t corner_top_left = 105;
  static constexpr uint8_t corner_top_right = 106;
  static constexpr uint8_t corner_bot_left = 107;
  static constexpr uint8_t corner_bot_right = 108;
  static constexpr uint8_t vertical_end_cap = 109;
  static constexpr uint8_t vertical = 110;
  static constexpr uint8_t horizontal = 111;
};

struct battle_border {
  static constexpr uint8_t horizontal = 96;
  static constexpr uint8_t corner_bot_right = 97;
  static constexpr uint8_t arrow_right = 98;
  static constexpr uint8_t vertical = 99;
  static constexpr uint8_t corner_bot_left = 100;
  static constexpr uint8_t three_dots = 101;
  static constexpr uint8_t vertical_end_cap = 102;
  static constexpr uint8_t level = 103;
  static constexpr uint8_t arrow_left = 104;
};

static inline void put_char(const uint32_t base_pattern,
                            const int32_t x, const int32_t y,
                            const uint8_t c)
{
  const int32_t ix = (0x2000 / 2)
    + ((cell_offset::y + y) * 64 + (cell_offset::x + x));
  vdp2.vram.u16[ix] = ((base_pattern + c) & 0xfff) | PATTERN_NAME_TABLE_1WORD__PALETTE(1);
}

void draw_box_border(const uint32_t base_pattern,
                     const screen_t& top_left, const screen_t& bot_right)
{
  // place corners
  put_char(base_pattern, top_left.x, top_left.y, dialog_border::corner_top_left);
  put_char(base_pattern, bot_right.x, top_left.y, dialog_border::corner_top_right);
  put_char(base_pattern, bot_right.x, bot_right.y, dialog_border::corner_bot_right);
  put_char(base_pattern, top_left.x, bot_right.y, dialog_border::corner_bot_left);

  for (int32_t y = top_left.y + 1; y < bot_right.y; y++) {
    // left vertical bar
    put_char(base_pattern, top_left.x, y, dialog_border::vertical);
    // right vertical bar
    put_char(base_pattern, bot_right.x, y, dialog_border::vertical);
  }

  for (int32_t x = top_left.x + 1; x < bot_right.x; x++) {
    // top horizontal bar
    put_char(base_pattern, x, top_left.y, dialog_border::horizontal);

    // bottom horizontal bar
    put_char(base_pattern, x, bot_right.y, dialog_border::horizontal);
  }
}

void dialog_t::draw(const uint32_t base_pattern)
{
  draw_box_border(base_pattern, top_left, bottom_right);
}
graphic.hpp (new file, 9 lines)

#pragma once

struct dialog_t
{
  static constexpr screen_t top_left = { 0, 12};
  static constexpr screen_t bottom_right = {19, 17};

  static void draw(const uint32_t base_pattern);
};
main.cpp (10 lines changed)

@@ -23,6 +23,8 @@
 #include "actor.hpp"
 #include "ledge_tiles.hpp"

+#include "graphic.hpp"
+
 struct draw_t {
   struct {
     uint16_t font; // div 32
@@ -505,9 +507,9 @@ void init_vdp1()

 void init_vdp2()
 {
-  vdp2.reg.PRISA = PRISA__S0PRIN(7); // Sprite register 0 PRIority Number
+  vdp2.reg.PRISA = PRISA__S0PRIN(6); // Sprite register 0 PRIority Number
   vdp2.reg.PRINA = PRINA__N0PRIN(5)
-                 | PRINA__N1PRIN(6);
+                 | PRINA__N1PRIN(7);

   // DISP: Please make sure to change this bit from 0 to 1 during V blank.
   vdp2.reg.TVMD = ( TVMD__DISP | TVMD__LSMD__NON_INTERLACE
@@ -555,7 +557,9 @@ void init_vdp2()
   const uint32_t base_pattern = state.draw.base_pattern.font;
   vdp2.reg.PNCN1 = PNCN1__N1PNB__1WORD | PNCN1__N1CNSM | PNCN1__N1SCN((base_pattern >> 10) & 0x1f);
   const uint32_t value = ((base_pattern + 127) & 0xfff) | PATTERN_NAME_TABLE_1WORD__PALETTE(1);
-  fill<uint32_t>(&vdp2.vram.u32[0 / 4], value | value << 16, 0x4000);
+  fill<uint32_t>(&vdp2.vram.u32[0x2000 / 4], value | value << 16, 0x2000);
+
+  dialog_t::draw(state.draw.base_pattern.font);

   palette_data();
 }
(file name not shown)

@@ -59,11 +59,11 @@ constexpr inline uint8_t get_block(const map_t& map, block_t block)
 #undef _has
 }

-namespace cell_offset
+struct cell_offset
 {
-  int32_t x = (320 - 160) / (2 * 8);
-  int32_t y = (240 - 144) / (2 * 8);
-}
+  static constexpr int32_t x = (320 - 160) / (2 * 8);
+  static constexpr int32_t y = (240 - 144) / (2 * 8);
+};

 static inline void render_screen(const uint32_t base_pattern,
                                  const map_t& map,
(file name not shown)

@@ -9,9 +9,9 @@ def includes_header():

 def extern_collision_tile_ids():
     for name, index in sorted_tilesets_constants_list():
-        tileset_header = parse.tileset_headers_list[index]
+        tileset_header = parse.tileset_headers_list()[index]
         coll_path = tileset_header.coll()
-        tile_ids = parse.collision_tile_ids_list[coll_path]
+        tile_ids = parse.collision_tile_ids_list()[coll_path]
         yield f"extern uint8_t {coll_path}[{len(tile_ids)}];"

 def generate_header():
@@ -21,9 +21,9 @@ def generate_header():
     return out

 def collision_array(name, index):
-    tileset_header = parse.tileset_headers_list[index]
+    tileset_header = parse.tileset_headers_list()[index]
     coll_path = tileset_header.coll()
-    tile_ids = parse.collision_tile_ids_list[coll_path]
+    tile_ids = parse.collision_tile_ids_list()[coll_path]
     yield f"uint8_t {coll_path}[] = {{"
     yield " ".join(
         f"{tile_ix},"
@@ -87,7 +87,7 @@ def map_objects():
     map_headers = list(sorted_map_headers())
     for map_header in map_headers:
         map_name = map_header.name2.lower()
-        map_objects = parse.map_objects_list[map_header.object()]
+        map_objects = parse.map_objects_list()[map_header.object()]
         yield from warp_events(map_name, map_objects)
         yield from bg_events(map_name, map_objects)
         yield from object_events(map_name, map_objects)
@@ -95,7 +95,7 @@ def map_objects():
     yield "const object_t map_objects[] = {"
     for map_header in map_headers:
         map_name = map_header.name2.lower()
-        map_objects = parse.map_objects_list[map_header.object()]
+        map_objects = parse.map_objects_list()[map_header.object()]
         yield from object(map_name, map_objects)
     yield "};"
(file name not shown)

@@ -24,14 +24,11 @@ from parse.map_header import Connection
 directions = sorted(['north', 'south', 'east', 'west'])

 def sorted_map_constants_list():
-    return sorted(parse.map_constants_list.items(), key=default_sort)
+    return sorted(parse.map_constants_list().items(), key=default_sort)

 def sorted_map_headers():
     map_constants_list = sorted_map_constants_list()
-    map_headers_dict = dict((map_header.name2, map_header) for map_header in parse.map_headers)
-    # hack to remove unused/duplicate underground_path_route_7
-    #map_headers = sorted(parse.map_headers, key=lambda m: m.name2)
-    #return filter(lambda m: m.name1 != "UndergroundPathRoute7Copy", map_headers)
+    map_headers_dict = dict((map_header.name2, map_header) for map_header in parse.map_headers())
     return (
         map_headers_dict[map_name2] for map_name2, _ in map_constants_list
         if map_name2 in map_headers_dict
@@ -45,7 +42,7 @@ def includes_header():
     yield '#include "tilesets.hpp"'
     yield ""
     for map_header in sorted_map_headers():
-        block_path = parse.maps_blocks_list[map_header.blocks()]
+        block_path = parse.maps_blocks_list()[map_header.blocks()]
         yield f'#include "../res/{block_path}.h"'
     yield ""

@@ -109,8 +106,8 @@ def connections(map_header)
     yield "},"

 def map(map_header):
-    block_path = parse.maps_blocks_list[map_header.blocks()]
-    map_constant = parse.map_constants_list[map_header.name2]
+    block_path = parse.maps_blocks_list()[map_header.blocks()]
+    map_constant = parse.map_constants_list()[map_header.name2]
     return [
         f"[map_t::{map_header.name2.lower()}] = {{",
         ".blocks = {",
(file name not shown)

@@ -14,7 +14,7 @@ from generate.binary import binary_res, start_size_value
 from generate.generate import renderer

 def sorted_sprite_constants_list():
-    return sorted(parse.sprite_constants_list.items(), key=default_sort)
+    return sorted(parse.sprite_constants_list().items(), key=default_sort)

 def includes_header():
     yield '#pragma once'
@@ -25,8 +25,8 @@ def includes_header():
         if name == 'SPRITE_NONE':
             continue
         assert index != 0, index
-        spritesheet = parse.spritesheets_list[index - 1]
-        sprite_path = parse.gfx_sprites_list[spritesheet.name]
+        spritesheet = parse.spritesheets_list()[index - 1]
+        sprite_path = parse.gfx_sprites_list()[spritesheet.name]
         yield f'#include "../res/{sprite_path}.h"'

 def includes_source():
@@ -76,8 +76,8 @@ def sprite(name, index):
     else:
         # spritesheets_list does not include SPRITE_NULL at index 0
         assert index != 0, index
-        spritesheet = parse.spritesheets_list[index - 1]
-        sprite_path = parse.gfx_sprites_list[spritesheet.name]
+        spritesheet = parse.spritesheets_list()[index - 1]
+        sprite_path = parse.gfx_sprites_list()[spritesheet.name]
         sprite_count = spritesheet.sprite_count
     return [
         f"[spritesheet_t::{sprite_name(name)}] = {{",
(file name not shown)

@@ -5,7 +5,7 @@ from generate.generate import renderer
 from generate.binary import start_size_value

 def sorted_tilesets_constants_list():
-    return sorted(parse.tileset_constants_list.items(), key=default_sort)
+    return sorted(parse.tileset_constants_list().items(), key=default_sort)

 def includes_header():
     yield "#pragma once"
@@ -13,11 +13,11 @@ def includes_header():
     yield '#include "../start_size.hpp"'
     yield ""
     for tileset_name, _ in sorted_tilesets_constants_list():
-        tileset_index = parse.tileset_constants_list[tileset_name]
-        tileset_header = parse.tileset_headers_list[tileset_index]
+        tileset_index = parse.tileset_constants_list()[tileset_name]
+        tileset_header = parse.tileset_headers_list()[tileset_index]

-        blockset_path = parse.gfx_tilesets_list[tileset_header.blockset()]
-        gfx_path = parse.gfx_tilesets_list[tileset_header.gfx()]
+        blockset_path = parse.gfx_tilesets_list()[tileset_header.blockset()]
+        gfx_path = parse.gfx_tilesets_list()[tileset_header.gfx()]

         yield f'#include "../res/{blockset_path}.h"'
         yield f'#include "../res/{gfx_path}.h"'
@@ -53,10 +53,10 @@ def generate_header():
     return out

 def blockset_tileset(name, index):
-    tileset_header = parse.tileset_headers_list[index]
+    tileset_header = parse.tileset_headers_list()[index]

-    blockset_path = parse.gfx_tilesets_list[tileset_header.blockset()]
-    gfx_path = parse.gfx_tilesets_list[tileset_header.gfx()]
+    blockset_path = parse.gfx_tilesets_list()[tileset_header.blockset()]
+    gfx_path = parse.gfx_tilesets_list()[tileset_header.gfx()]
     coll_path = tileset_header.coll()

     return [
(file name not shown)

@@ -1,5 +1,2 @@
 from pprint import pprint
 from parse import parse
-
-for i in parse.collision_tile_ids_list.items():
-    pprint(i)
tools/parse/line.py (new file, 13 lines)

def skip_whitespace(lines):
    i = 0
    while lines[i:] and not lines[i].strip():
        i += 1
    return lines[i:]

def next_line(lines):
    lines = skip_whitespace(lines)
    if not lines:
        return [], None
    else:
        line = lines[0].strip()
        return lines[1:], line
(file name not shown)

@@ -71,7 +71,7 @@ def flatten(tokens):
     # \2_WIDTH
     # \2_HEIGHT
     map_headers = [s for s in tokens if s[0] == 'map_header']
-    assert len(map_headers) == 1
+    assert len(map_headers) == 1, map_headers
     map_header, = map_headers
     _, (name1, name2, tileset, connection_mask0) = map_header
     connections = [s[1] for s in tokens if s[0] == 'connection']
(file name not shown)

@@ -18,25 +18,36 @@ from parse import sprite_constants

 prefix = Path(sys.argv[1])

-map_headers = map_header.parse_all(prefix)
-maps_blocks_list = maps_blocks.parse(prefix)
-tileset_constants_list = tileset_constants.parse(prefix)
-tileset_headers_list = tileset_headers.parse(prefix)
-gfx_tilesets_list = gfx_tilesets.parse(prefix)
-# tileset coll
-collision_tile_ids_list = collision_tile_ids.parse(prefix)
-map_objects_list = map_objects.parse_all(prefix)
-hidden_objects_list = hidden_objects.parse(prefix)
-map_constants_list = map_constants.parse(prefix)
+def memoize(f):
+    value = None
+    def inner():
+        nonlocal value
+        if value is None:
+            value = f()
+        return value
+    return inner
+
+map_constants_list = memoize(lambda: map_constants.parse(prefix))
+map_headers = memoize(lambda: map_header.parse_all(prefix))
+maps_blocks_list = memoize(lambda: maps_blocks.parse(prefix))
+map_objects_list = memoize(lambda: map_objects.parse_all(prefix))
+
+tileset_constants_list = memoize(lambda: tileset_constants.parse(prefix))
+tileset_headers_list = memoize(lambda: tileset_headers.parse(prefix))
+gfx_tilesets_list = memoize(lambda: gfx_tilesets.parse(prefix))
+collision_tile_ids_list = memoize(lambda: collision_tile_ids.parse(prefix))
+hidden_objects_list = memoize(lambda: hidden_objects.parse(prefix))

 # need:
 #data/tilesets/pair_collision_tile_ids.asm
-#ledge_tiles.asm
 #cut_tree_blocks.asm

 # home/vcopy: animations

 # sprites
-gfx_sprites_list = gfx_sprites.parse(prefix)
-spritesheets_list = spritesheets.parse(prefix)
-sprite_constants_list = sprite_constants.parse(prefix)
+gfx_sprites_list = memoize(lambda: gfx_sprites.parse(prefix))
+spritesheets_list = memoize(lambda: spritesheets.parse(prefix))
+sprite_constants_list = memoize(lambda: sprite_constants.parse(prefix))
+
+# text
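The memoize wrapper above turns each parse step into a lazy, compute-once callable, which is why the generators now call e.g. parse.map_headers() instead of reading a module-level variable. A minimal standalone sketch of the same pattern (expensive_parse is a hypothetical stand-in for the real parse functions):

def memoize(f):
    value = None
    def inner():
        nonlocal value
        if value is None:
            value = f()   # first call does the actual work
        return value      # later calls return the cached result
    return inner

# hypothetical stand-in for e.g. map_constants.parse(prefix)
def expensive_parse():
    print("parsing...")
    return {"PALLET_TOWN": 0}

map_constants_list = memoize(expensive_parse)
map_constants_list()   # prints "parsing..." once and returns the dict
map_constants_list()   # cached, no second parse

One caveat of caching on "is None": a wrapped function that legitimately returns None would be re-run on every call, which looks harmless here since these parsers return collections.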
tools/parse/scripts.py (new file, 111 lines)

from parse.line import next_line, skip_whitespace


def parse_dw_const(line):
    dw_const, args = line.split(maxsplit=1)
    assert dw_const == "dw_const", line
    args = args.split(';')[0]
    args = [arg.strip() for arg in args.split(',')]
    assert len(args) == 2, args
    assert (
        args[1].startswith('TEXT_')
        or args[1].startswith('SCRIPT_')
    ), args[1]
    return args


def tokenize_textpointer(lines):
    pointers = []
    while lines:
        lines_next, line = next_line(lines)
        if line is None:
            break
        if line.startswith("def_text_pointers"):
            pass
        elif line.startswith("text_end"):
            pass
        elif line.startswith(";"):
            pass
        elif line.startswith("dw_const"):
            pointer = parse_dw_const(line)
            pointers.append(pointer)
        else:
            assert (
                line is None
                or line.endswith(':')
                or line.startswith("def_trainers")
            ), line
            break
        lines = lines_next

    return lines, pointers


def tokenize_textpointers(lines):
    ix = 0
    # skip lines until _TextPointers is found
    while lines:
        lines, line = next_line(lines)
        if line is None:
            break
        if not line.endswith('_TextPointers:'):
            continue

        name = line[:-1]
        lines, textpointers = tokenize_textpointer(lines)
        return name, textpointers


def tokenize_text_far(lines):
    lines, far = next_line(lines)
    lines, end = next_line(lines)
    simple_far = (
        far is not None
        and end is not None
        and far.split()[0] == 'text_far'
        and end.split()[0] == 'text_end'
    )
    if not simple_far:
        return None
    else:
        _, far_label = far.split()
        assert far_label.startswith('_')
        return far_label


def find_text_far(labels, lines):
    while lines:
        lines, line = next_line(lines)
        if line is None:
            break
        if line.endswith(':') and line[:-1] in labels:
            label = line[:-1]
            text_far = tokenize_text_far(lines)
            if text_far is not None:
                yield label, text_far


def tokenize(lines):
    tokens = tokenize_textpointers(lines)
    if tokens is None:
        return None
    name, textpointers = tokens
    labels_l = [label for label, _ in textpointers]
    labels = set(labels_l)

    # now, resolve text_far
    text_far_l = list(find_text_far(labels, lines))
    text_far_d = dict(text_far_l)
    assert len(text_far_l) == len(text_far_d)
    textpointers_d = dict(
        (v, k) for k, v in textpointers
    )
    return name, (textpointers_d, text_far_d)


def parse(path):
    with open(path) as f:
        original_lines = f.read().split('\n')

    tokens = tokenize(original_lines)
    return tokens


def parse_all(prefix):
    base_path = prefix / 'scripts'
    paths = [p for p in base_path.iterdir()
             # fixme: ViridianMart is weird
             if p.is_file() and 'ViridianMart.asm' not in str(p)]
    return dict(filter(lambda x: x is not None,
                       (parse(path) for path in paths)))
tools/parse/text.py (new file, 75 lines)

from parse.line import next_line, skip_whitespace


def parse_label(lines):
    lines, line = next_line(lines)
    assert line[-2:] == '::', line
    name = line.removesuffix('::')
    return lines, name


string_tokens = {"text", "cont", "para", "line"}


def parse_string(line):
    line = line.strip()
    assert line[0] == '"'
    line = line[1:]
    for i, c in enumerate(line):
        if c != '\\' and line[i] == '"':
            return line[:i], line[i+1:]


def parse_args(line):
    return [i.strip() for i in line.split(',')]


def parse_body(lines):
    body = []
    while lines:
        lines, line = next_line(lines)
        if line in {"text_end", "done", "prompt"}:
            body.append((line,))
            break
        type, *rest = line.split(maxsplit=1)
        if type in string_tokens:
            value, = rest
            string_value, rest = parse_string(value)
            assert rest == "", rest
            body.append((type, string_value))
        elif type == 'text_ram':
            value, = rest
            assert value[0] == "w", value
            body.append((type, value))
        elif type == 'text_start':
            body.append((type,))
        elif type in {'text_decimal', 'text_bcd'}:
            value, = rest
            body.append((type, parse_args(value)))
        else:
            assert False, line

    return lines, body


def tokenize_text(lines):
    lines, name = parse_label(lines)
    lines, body = parse_body(lines)
    return lines, (name, body)


def tokenize(lines):
    while lines:
        lines, tokens = tokenize_text(lines)
        lines = skip_whitespace(lines)
        yield tokens


def parse(path):
    with open(path) as f:
        tokens = list(tokenize(f.read().split('\n')))
    d = dict(tokens)
    assert len(tokens) == len(d)
    return d


def parse_all(prefix):
    base_path = prefix / 'text'
    paths = [p for p in base_path.iterdir() if p.is_file()]
    return [parse(path) for path in paths]


import sys
from pprint import pprint
from pathlib import Path
pprint(parse_all(Path(sys.argv[1])))
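As a quick illustration of what this text parser produces (the label and strings below are made up, not taken from the game scripts), a text file containing:

_SampleText::
    text "Hello!"
    para "How are you?"
    done

would be turned by parse() into a dict mapping each label to its token list:

{'_SampleText': [('text', 'Hello!'), ('para', 'How are you?'), ('done',)]}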