This commit is contained in:
Zack Buhman 2023-07-24 13:25:39 -07:00
commit c55300ea75
23 changed files with 860 additions and 0 deletions

11
.gitignore vendored Normal file
View File

@ -0,0 +1,11 @@
*.gch
*.o
*.elf
*.bin
*.iso
*.cue
*.out
*.d
*.pyc
.#*
res/

67
Makefile Normal file
View File

@ -0,0 +1,67 @@
# Build the pokered map/tileset demo for the Sega Saturn.
CFLAGS = -Isaturn
OPT ?= -Og
LIB = ./saturn
SRC = main.o
# Auto-dependency files emitted alongside each object.
DEP = $(patsubst %.o,%.d,$(SRC))

# $(call res,EXT,DIR): object names for pokered/DIR*.EXT, mirrored under res/.
res = $(subst pokered/,res/,$(patsubst %.$(1),%.$(1).o,$(wildcard $(2)*.$(1))))
# $(call res_png,EXT,DIR): like res, but the sources are .png files converted to .EXT.
res_png = $(subst pokered/,res/,$(patsubst %.png,%.$(1).o,$(wildcard $(2)*.png)))

GFX_TILESETS = $(call res_png,4bpp,pokered/gfx/tilesets/)
GFX_BLOCKSETS = $(call res,bst,pokered/gfx/blocksets/)
MAPS_BLOCKS = $(call res,blk,pokered/maps/)
GENERATED = $(GFX_TILESETS) $(GFX_BLOCKSETS) $(MAPS_BLOCKS)
OBJ = $(SRC) $(GENERATED)

all: main.cue

include $(LIB)/common.mk
-include $(DEP)

# Copy $< to $@, creating the destination directory first.
define COPY_BINARY
@mkdir -p $(dir $@)
cp -a $< $@
endef

generated: $(GENERATED)

res/%.4bpp: pokered/%.png
	@mkdir -p $(dir $@)
	python tools/png_to_4bpp.py $< $@

%.4bpp.h:
	$(BUILD_BINARY_H)

%.4bpp.o: %.4bpp %.4bpp.h
	$(BUILD_BINARY_O)

res/%.blk: pokered/%.blk
	$(COPY_BINARY)

%.blk.h:
	$(BUILD_BINARY_H)

%.blk.o: %.blk %.blk.h
	$(BUILD_BINARY_O)

res/%.bst: pokered/%.bst
	$(COPY_BINARY)

%.bst.h:
	$(BUILD_BINARY_H)

%.bst.o: %.bst %.bst.h
	$(BUILD_BINARY_O)

# Ensure tileset data exists before compiling any object (order-only).
%.o: | $(GFX_TILESETS)

main.elf: $(OBJ)

clean: clean-sh

clean-sh:
	rm -rf res

# Fix: was `PHONY:` (missing leading dot), which declared nothing — a file
# named e.g. `clean` would have silently broken `make clean`.
.PHONY: all generated clean clean-sh generated-headers

1
common Symbolic link
View File

@ -0,0 +1 @@
../saturn-examples/common

133
main.cpp Normal file
View File

@ -0,0 +1,133 @@
#include <cstdint>
#include "vdp2.h"
#include "common/copy.hpp"
#include "common/vdp2_func.hpp"
#include "test.cpp"
// Pack three 5-bit channels into a Saturn 15-bit BGR color word
// (blue in bits 14-10, green in 9-5, red in 4-0).
constexpr inline uint16_t rgb15(int32_t r, int32_t g, int32_t b)
{
  const uint32_t red   = r & 31;
  const uint32_t green = g & 31;
  const uint32_t blue  = b & 31;
  return static_cast<uint16_t>((blue << 10) | (green << 5) | red);
}
// Load a 4-entry grayscale ramp into the start of VDP2 color RAM,
// covering palette indices 0-3 used by the 4bpp tile data.
void palette_data()
{
  vdp2.cram.u16[0] = rgb15( 0,  0,  0); // black
  vdp2.cram.u16[1] = rgb15(10, 10, 10); // dark gray
  vdp2.cram.u16[2] = rgb15(21, 21, 21); // light gray
  vdp2.cram.u16[3] = rgb15(31, 31, 31); // white
}
// Copy a resource's cell (character pattern) data into VDP2 VRAM, placing it
// immediately below `top` (a byte offset into VRAM). Returns the new base
// address, which doubles as the next allocation top for subsequent loads.
uint32_t cell_data(const start_size_t& buf, const uint32_t top)
{
  // round to nearest multiple of 32
  const uint32_t table_size = (buf.size + 0x20 - 1) & (-0x20);
  const uint32_t base_address = top - table_size; // in bytes
  // NOTE(review): assumes copy<uint32_t> takes a byte count and that
  // buf.start is 4-byte aligned -- confirm against common/copy.hpp.
  copy<uint32_t>(&vdp2.vram.u32[(base_address / 4)],
                 reinterpret_cast<uint32_t const * const>(buf.start),
                 buf.size);
  return base_address;
}
// Draw one 4x4-tile map block into the NBG0 pattern-name table.
// `block` indexes the tileset's blockset (16 tile ids per block); each tile
// id encodes x in the low nibble and y in the high nibble of a 16-tile-wide
// tile sheet.
constexpr inline void render_block(const uint32_t base_pattern,
                                   const tileset_t& tileset,
                                   const uint32_t map_x,
                                   const uint32_t map_y,
                                   const uint8_t block)
{
  for (uint32_t block_y = 0; block_y < 4; block_y++) {
    for (uint32_t block_x = 0; block_x < 4; block_x++) {
      const uint32_t block_ix = 4 * block_y + block_x;
      const uint8_t tile_xy = tileset.blockset.start[block * 4 * 4 + block_ix];
      const uint8_t tile_x = (tile_xy >> 0) & 0xf;
      const uint8_t tile_y = (tile_xy >> 4) & 0xf;
      const uint32_t tile_ix = tile_y * 16 + tile_x;
      // Destination cell inside the 64-cell-wide plane.
      const uint32_t cell_y = map_y * 4 + block_y;
      const uint32_t cell_x = map_x * 4 + block_x;
      // 1-word pattern name: low 12 bits are the character number.
      vdp2.vram.u16[64 * cell_y + cell_x] = (base_pattern & 0xfff) + tile_ix;
      //vdp2.vram.u32[64 * cell_y + cell_x] = base_pattern + tile_ix;
    }
  }
}
// Render every block of the Pallet Town map into the NBG0 plane.
void render(const uint32_t base_pattern)
{
  const map_t& map = maps[map_t::pallet_town];
  for (uint32_t map_y = 0; map_y < map.height; map_y++) {
    for (uint32_t map_x = 0; map_x < map.width; map_x++) {
      // One blockset index per map cell, row-major.
      const uint8_t block = map.blocks.start[map.width * map_y + map_x];
      render_block(base_pattern,
                   tilesets[map.tileset],
                   map_x,
                   map_y,
                   block);
    }
  }
}
// Entry point: configure VDP2 for a 16-color cell-format NBG0, load the
// overworld tileset and palette into VRAM/CRAM, and render Pallet Town.
void main()
{
  v_blank_in();
  // DISP: Please make sure to change this bit from 0 to 1 during V blank.
  vdp2.reg.TVMD = ( TVMD__DISP | TVMD__LSMD__NON_INTERLACE
                  | TVMD__VRESO__240 | TVMD__HRESO__NORMAL_320);
  /* set the color mode to 5bits per channel, 1024 colors */
  vdp2.reg.RAMCTL = RAMCTL__CRKTE | RAMCTL__CRMD__RGB_5BIT_1024 | RAMCTL__VRAMD | RAMCTL__VRBMD;
  /* enable display of NBG0 */
  vdp2.reg.BGON = BGON__N0ON;
  /* set character format for NBG0 to palettized 16 color
     set enable "cell format" for NBG0
     set character size for NBG0 to 1x1 cell */
  vdp2.reg.CHCTLA = CHCTLA__N0CHCN__16_COLOR
                  | CHCTLA__N0BMEN__CELL_FORMAT
                  | CHCTLA__N0CHSZ__1x1_CELL;
  /* plane size */
  vdp2.reg.PLSZ = PLSZ__N0PLSZ__1x1;
  /* map plane offset
     1-word: value of bit 6-0 * 0x2000
     2-word: value of bit 5-0 * 0x4000
  */
  constexpr int plane_a = 0;
  constexpr int plane_a_offset = plane_a * 0x2000;
  constexpr int page_size = 64 * 64 * 2; // N0PNB__1WORD (16-bit)
  constexpr int plane_size = page_size * 1;
  // VRAM cycle patterns during load.
  // NOTE(review): presumably 0xe = "no access" per VDP2 cycle-pattern docs -- confirm.
  vdp2.reg.CYCA0 = 0xeeeeeeee;
  vdp2.reg.CYCA1 = 0xeeeeeeee;
  vdp2.reg.CYCB0 = 0xeeeeeeee;
  vdp2.reg.CYCB1 = 0xeeeeeeee;
  vdp2.reg.MPOFN = MPOFN__N0MP(0); // bits 8~6
  vdp2.reg.MPABN0 = MPABN0__N0MPB(0) | MPABN0__N0MPA(plane_a); // bits 5~0
  vdp2.reg.MPCDN0 = MPABN0__N0MPD(0) | MPABN0__N0MPC(0); // bits 5~0
  // Allocate cell data downward from the top of VRAM.
  uint32_t top = (sizeof (union vdp2_vram));// - ((sizeof (union vdp2_vram)) / 8);
  palette_data();
  uint32_t base_address = top = cell_data(tilesets[tileset_t::overworld].tileset, top);
  // Pattern-name character numbers address VRAM in 32-byte units.
  uint32_t base_pattern = base_address / 32;
  /* use 1-word (16-bit) pattern names */
  vdp2.reg.PNCN0 = PNCN0__N0PNB__1WORD | PNCN0__N0CNSM | PNCN0__N0SCN((base_pattern >> 10) & 0x1f);
  //vdp2.reg.PNCN0 = PNCN0__N0PNB__2WORD | PNCN0__N0CNSM;
  render(base_pattern);
  // Restore display-time VRAM cycle patterns.
  // NOTE(review): cycle values assumed from VDP2 documentation -- confirm.
  vdp2.reg.CYCA0 = 0x0fff'ffff;
  vdp2.reg.CYCA1 = 0xffff'ffff;
  vdp2.reg.CYCB0 = 0xffff'ffff;
  vdp2.reg.CYCB1 = 0x4fff'ffff;
}

1
pokered Symbolic link
View File

@ -0,0 +1 @@
../pokered/

1
saturn Symbolic link
View File

@ -0,0 +1 @@
../saturn

67
tileset.hpp Normal file
View File

@ -0,0 +1,67 @@
#pragma once
#include "res/gfx/tilesets/cavern.4bpp.h"
#include "res/gfx/tilesets/cemetery.4bpp.h"
#include "res/gfx/tilesets/club.4bpp.h"
#include "res/gfx/tilesets/facility.4bpp.h"
#include "res/gfx/tilesets/forest.4bpp.h"
#include "res/gfx/tilesets/gate.4bpp.h"
#include "res/gfx/tilesets/gym.4bpp.h"
#include "res/gfx/tilesets/house.4bpp.h"
#include "res/gfx/tilesets/interior.4bpp.h"
#include "res/gfx/tilesets/lab.4bpp.h"
#include "res/gfx/tilesets/lobby.4bpp.h"
#include "res/gfx/tilesets/mansion.4bpp.h"
#include "res/gfx/tilesets/overworld.4bpp.h"
#include "res/gfx/tilesets/plateau.4bpp.h"
#include "res/gfx/tilesets/pokecenter.4bpp.h"
#include "res/gfx/tilesets/reds_house.4bpp.h"
#include "res/gfx/tilesets/ship.4bpp.h"
#include "res/gfx/tilesets/ship_port.4bpp.h"
#include "res/gfx/tilesets/underground.4bpp.h"
// Namespaced helpers and identifiers for the 19 Gen-1 tilesets.
struct tileset {
  // Indices into the global `tilesets` table, in alphabetical order.
  enum {
    cavern,
    cemetery,
    club,
    facility,
    forest,
    gate,
    gym,
    house,
    interior,
    lab,
    lobby,
    mansion,
    overworld,
    plateau,
    pokecenter,
    reds_house,
    ship,
    ship_port,
    underground,
  };
  // Character number for a loaded tileset (base_address / 32).
  static uint32_t pattern_name(uint32_t graphics_ix);
  // Load tileset `i` into VRAM below `top`; returns the new top.
  static uint32_t load(uint32_t top, uint32_t i);
};
// A binary resource plus the VRAM address it was loaded to.
struct buf_t {
  uint32_t * buf;        // source data pointer
  uint32_t size;         // size in bytes
  uint32_t base_address; // VRAM byte offset assigned by cell_data()
};
extern uint32_t cell_data(const buf_t& buf, const uint32_t top);
// Convert a loaded tileset's VRAM base address into its pattern-name
// character number (character patterns are addressed in 32-byte units).
// NOTE(review): defined (non-inline) in a header -- will violate the ODR if
// this header is included from more than one translation unit.
uint32_t tileset::pattern_name(uint32_t graphics_ix)
{
  return tilesets[graphics_ix].base_address / 32;
}
// Copy tileset `i` into VRAM below `top`, record where it landed, and return
// the new allocation top.
// NOTE(review): passes tilesets[i] where cell_data() is declared to take a
// buf_t& -- confirm the element type of `tilesets` matches.
uint32_t tileset::load(uint32_t top, uint32_t i)
{
  tilesets[i].base_address = top = cell_data(tilesets[i], top);
  return top;
}

View File

@ -0,0 +1,26 @@
from pprint import pprint
from parse import parse
def generate():
    """Debug driver: pretty-print the parsed maps-blocks table.

    The commented lines are alternate dumps of the other parse results.
    """
    pprint(parse.maps_blocks_list)
    #pprint(parse.map_headers)
    #pprint(parse.tileset_constants_list)
    #pprint(parse.tileset_headers_list)
    #pprint(parse.gfx_tilesets_list)

# Runs at import time; `parse` has already parsed the tree by then.
generate()
"""
map_headers[0].tileset == 'OVERWORLD'
map_headers[0].width() == 'PALLET_TOWN_WIDTH'
map_headers[0].height() == 'PALLET_TOWN_HEIGHT'
map_headers[0].blocks() == 'PalletTown_Blocks'
maps_blocks['PalletTown_Blocks'] == 'maps/PalletTown.blk'
tileset_constants_list['OVERWORLD'] == 0
tileset_headers_list[0].name == 'Overworld'
tileset_headers_list[0].blockset() == 'Overworld_Block'
tileset_headers_list[0].gfx() == 'Overworld_GFX'
gfx_tilesets_list['Overworld_Block'] == 'gfx/blocksets/overworld.bst'
gfx_tilesets_list['Overworld_GFX'] == 'gfx/tilesets/overworld.2bpp'
"""

View File

@ -0,0 +1,15 @@
from os import path
from generate.generate import prefix
def as_obj_binary(path):
    """Return the `_binary_<mangled>` symbol prefix derived from *path*.

    The extension is dropped, then '.' and '/' are replaced with '_'.
    Fix: the original called `str(path).splitext()` -- `str` has no such
    method (AttributeError); `splitext` lives in `os.path`.
    NOTE(review): objcopy's default mangling keeps the extension; dropping it
    here is assumed intentional -- confirm against BUILD_BINARY_O.
    """
    # Local import: the module-level `from os import path` is shadowed by
    # this function's `path` parameter.
    import os.path
    p0, _ = os.path.splitext(str(path))
    p = p0.replace('.', '_').replace('/', '_')
    return f"_binary_{p}"
def g(block_path):
    # NOTE(review): appears unfinished -- computes values but returns nothing.
    path = prefix / block_path  # shadows the imported `path` module
    obj_binary = as_obj_binary(path)
    "forest.4bpp.cell.h"  # stray expression; presumably a reminder of the header name scheme
def f(blocks_list):

View File

@ -0,0 +1,3 @@
import sys  # fix: `sys` was used below without being imported (NameError)
from pathlib import Path

# Root of the pokered tree, taken from the second CLI argument.
prefix = Path(sys.argv[2])

View File

@ -0,0 +1,32 @@
from parse.maps_blocks import tokenize_block
from parse.map_header import tokenize_line
from parse import number
def tokenize_lines(lines):
    """Yield tokens for collision-table lines.

    `_Coll:` label lines tokenize as blocks; `coll_tiles` macro lines are
    unwrapped from their block and tokenized as an instruction line.
    """
    for line in lines:
        if '_Coll:' in line:
            yield tokenize_block(line, delim='::')
        elif 'coll_tiles' in line:
            # tokenize_block returns a 1-tuple here; unpack it.
            tokens, = tokenize_block(line, delim='::')
            yield tokenize_line(tokens)
def flatten(tokens):
    """Pair each pending `_Coll` label with the tile ids from the next
    `coll_tiles` token (consecutive labels share one tile list)."""
    pending = []
    for token in tokens:
        if token[0] == 'coll_tiles':
            ids = token[1] if len(token) == 2 else []
            for label in pending:
                yield label, [number.parse(i) for i in ids]
            pending = []
        elif len(token) == 1:
            (label,) = token
            pending.append(label)
def parse(prefix):
    """Parse collision_tile_ids.asm into {label: [tile ids]}.

    Asserts that no label appears twice.
    """
    path = prefix / 'data/tilesets/collision_tile_ids.asm'
    with open(path) as f:
        tokens = tokenize_lines(f.read().split('\n'))
        l = list(flatten(tokens))
        d = dict(l)
        assert len(l) == len(d)  # no duplicate labels
        return d

View File

@ -0,0 +1,17 @@
from parse.maps_blocks import tokenize_block, flatten
def tokenize_lines(lines):
    """Yield block tokens for `_GFX:`/`_Block:` label lines of gfx/tilesets.asm."""
    for line in lines:
        if '_GFX:' in line or '_Block:' in line:
            yield tokenize_block(line, delim='::')
def parse(prefix):
    """Parse gfx/tilesets.asm into {label: path-under-gfx/}.

    Asserts that no label appears twice.
    """
    path = prefix / 'gfx/tilesets.asm'
    with open(path) as f:
        tokens = tokenize_lines(f.read().split('\n'))
        l = list(flatten(tokens,
                         endings=['_GFX', '_Block'],
                         base_path='gfx/'))
        d = dict(l)
        assert len(l) == len(d)  # no duplicate labels
        return d

View File

@ -0,0 +1,81 @@
from dataclasses import dataclass
from parse import number
from parse import map_header
def tokenize_label(line):
    """Extract the label name preceding the first ':' on *line*."""
    head, _, _ = line.partition(':')
    return head.strip()
def tokenize_lines(lines):
    """Yield label tokens (1-tuples) and data/macro tokens for hidden_objects.asm."""
    for line in lines:
        if line.strip().endswith(':'):
            yield (tokenize_label(line),)
        # skip multi-line continuations (trailing ' \')
        elif ('dw ' in line or 'db ' in line) and ' \\' not in line:
            yield map_header.tokenize_line(line)
        elif 'hidden_object' in line or 'hidden_text_predef' in line:
            yield map_header.tokenize_line(line)
def flatten0(tokens):
    """Group tokens into (label, [value-tokens]) pairs.

    A 1-tuple token starts a new labelled group; every other token is
    collected under the most recent label.
    """
    stack = []
    label = None
    for t in tokens:
        if len(t) == 1:
            # A new label closes out the previous group, if any.
            if label is not None:
                yield (label, stack)
                stack = []
                label = None
            assert label is None
            label, = t
        else:
            stack.append(t)
    # Fix: flush the final group -- the original dropped the last label's values.
    if label is not None:
        yield (label, stack)
@dataclass
class HiddenObject:
    # (x, y) map coordinates of the hidden object.
    location: tuple[int, int]
    item_id: str
    # Name of the routine invoked when the object is inspected.
    object_routine: str
@dataclass
class HiddenObjects:
    # Label of the *Objects block in hidden_objects.asm.
    label: str
    # All hidden objects under that label.
    # (annotation corrected: flatten() passes a list of HiddenObject)
    hidden_objects: list[HiddenObject]
def flatten(f0):
    """Convert (label, values) groups into plain data lists or HiddenObjects.

    Groups starting with a db/dw data row (that is not the `-1` terminator)
    are yielded as (label, [values]); every other group must be a *Objects
    block built from hidden_object / hidden_text_predef macros.
    """
    for label, values in f0:
        if values[0][0] in {"db", "dw"} and values[0][1] != ['-1']:
            def vals():
                # Flatten single-argument db/dw rows into their values.
                for v in values:
                    assert len(v) == 2
                    v0, v1 = v
                    assert len(v1) == 1
                    if v0 in {"db", "dw"}:
                        yield v1[0]
            yield label, list(vals())
        else:
            # Fix: the assert message referenced undefined `name`, which
            # raised NameError instead of a useful AssertionError.
            assert label.endswith("Objects"), label
            def vals():
                # Yield the argument lists of the object macros; bare `db`
                # rows (the -1 terminator) are skipped.
                for value in values:
                    macro, args = value
                    if macro in {'hidden_object', 'hidden_text_predef'}:
                        yield args
                    else:
                        assert macro == 'db', macro
            yield HiddenObjects(
                label,
                [
                    HiddenObject(
                        location=tuple(map(number.parse, [x, y])),
                        item_id=item_id,
                        object_routine=object_routine
                    )
                    for x, y, item_id, object_routine
                    in vals()
                ]
            )
def parse(prefix):
    """Parse data/events/hidden_objects.asm into a list of groups/records."""
    path = prefix / "data/events/hidden_objects.asm"
    with open(path) as f:
        tokens = list(tokenize_lines(f.read().split('\n')))
        return list(flatten(flatten0(tokens)))

View File

@ -0,0 +1,33 @@
from dataclasses import dataclass
from parse.map_header import tokenize_line
def tokenize_map_const(line):
    # Thin alias; `map_const` lines need no special handling beyond tokenize_line.
    return tokenize_line(line)
def tokenize_lines(lines):
    """Yield tokens for every `map_const` line."""
    for line in lines:
        if "map_const" in line:
            yield tokenize_map_const(line)
@dataclass
class MapConstant:
    # Constant name, e.g. PALLET_TOWN.
    name: str
    # Map dimensions in blocks.
    width: int
    height: int
def flatten(tokens):
    """Convert `map_const` tokens into MapConstant records."""
    for key, params in tokens:
        if key != 'map_const':
            continue
        const_name, w, h = params
        yield MapConstant(const_name, int(w), int(h))
def parse(prefix):
    """Parse constants/map_constants.asm into a list of MapConstant."""
    path = prefix / "constants/map_constants.asm"
    with open(path) as f:
        tokens = tokenize_lines(f.read().split("\n"))
        return list(flatten(tokens))

84
tools/parse/map_header.py Normal file
View File

@ -0,0 +1,84 @@
from dataclasses import dataclass
# data/maps/headers/AgathasRoom.asm
def tokenize_params(params):
    """Normalize raw parameter strings from an asm instruction line.

    A parameter containing '|' (a bitmask like `NORTH | SOUTH`) becomes a
    list of stripped flag names; anything else is yielded stripped.
    Fix: split on '|' itself -- the original split on ' |', so a mask
    written without spaces (`NORTH|SOUTH`) was never split.
    """
    for param in params:
        if '|' in param:
            yield [p.strip() for p in param.split('|')]
        else:
            yield param.strip()
def tokenize_line(line):
    """Split one asm line into (mnemonic,) or (mnemonic, [params])."""
    code = line.split(';')[0].strip()  # drop any trailing comment
    pieces = code.split(' ', maxsplit=1)
    if len(pieces) == 1:
        return tuple(pieces)
    mnemonic, rest = pieces
    raw_params = [p.strip() for p in rest.split(',')]
    return mnemonic, list(tokenize_params(raw_params))
def tokenize_lines(lines):
    """Tokenize every non-empty line."""
    for line in filter(bool, lines):
        yield tokenize_line(line)
@dataclass
class MapHeader:
    """One parsed data/maps/headers/*.asm file.

    name1 is the CamelCase label (PalletTown), name2 the SHOUTY constant
    (PALLET_TOWN); the helper methods derive the conventional symbol names.
    """
    name1: str
    name2: str
    tileset: str
    connection_names: list[str] # not sure if this one is useful
    connections: list[list]
    def blocks(self):
        return f"{self.name1}_Blocks"
    def text_pointers(self):
        return f"{self.name1}_TextPointers"
    # Fix: the four methods below had trailing commas, so they returned
    # 1-tuples instead of strings (inconsistent with blocks()/text_pointers()).
    def script(self):
        return f"{self.name1}_Script"
    def object(self):
        return f"{self.name1}_Object"
    def width(self):
        return f"{self.name2}_WIDTH"
    def height(self):
        return f"{self.name2}_HEIGHT"
def flatten(tokens):
    """Build a MapHeader from the tokens of a single header file."""
    # expects tokens from a single file
    # PalletTown, PALLET_TOWN, OVERWORLD, NORTH | SOUTH
    # dw \1_Blocks
    # dw \1_TextPointers
    # dw \1_Script
    # dw {\1_Object}
    # \2_WIDTH
    # \2_HEIGHT
    map_headers = [s for s in tokens if s[0] == 'map_header']
    assert len(map_headers) == 1  # exactly one map_header per file
    map_header, = map_headers
    _, (name1, name2, tileset, connection_mask) = map_header
    connections = [s for s in tokens if s[0] == 'connection']
    return MapHeader(
        name1 = name1,
        name2 = name2,
        tileset = tileset,
        # '0' means no connections; otherwise the mask is already a list of
        # directions produced by tokenize_params (e.g. ['NORTH', 'SOUTH']).
        connection_names = [] if connection_mask == '0' else connection_mask,
        connections = [tuple(c[1]) for c in connections]
    )
def parse(path):
    """Parse a single map header file into a MapHeader."""
    with open(path) as f:
        tokens = list(tokenize_lines(f.read().split('\n')))
        return flatten(tokens)
def parse_all(prefix):
    """Parse every file in data/maps/headers (order is filesystem-dependent)."""
    base_path = prefix / 'data/maps/headers'
    paths = (p for p in base_path.iterdir() if p.is_file())
    return [parse(path) for path in paths]

View File

@ -0,0 +1,96 @@
from dataclasses import dataclass
from parse import number
from parse.map_header import tokenize_line
@dataclass
class ObjectEvent:
    # (x, y) map coordinates.
    location: tuple[int, int]
    sprite_id: str
    movement: str
    range_or_direction: str
    text_id: str
    # Optional trailing macro args (trainers/items/pokemon variants).
    items_id_or_trainer_id_or_pokemon_id: str = None
    trainer_number_or_pokemon_level: str = None
@dataclass
class WarpEvent:
    # (x, y) map coordinates of the warp tile.
    location: tuple[int, int]
    destination_map: str
    # Index of the warp within the destination map (parsed to int).
    destination_warp_id: str
@dataclass
class BgEvent:
    # (x, y) map coordinates of the sign.
    location: tuple[int, int]
    sign_id: str
@dataclass
class Object:
    # Label of the map object file's top label.
    label: str
    warp_events: list    # list of WarpEvent
    object_events: list  # list of ObjectEvent
    bg_events: list      # list of BgEvent
def tokenize_label(line):
    """Tokenize a `Name:` line as a ('label', name) pair."""
    name = line.partition(':')[0].strip()
    return ('label', name)
def tokenize_event(line):
    # tokenize_line returns (key, params); list() turns the pair into [key, params].
    return list(tokenize_line(line))
def tokenize_lines(lines):
    """Yield event tokens and label tokens for a map object file."""
    for line in lines:
        if "_event " in line:
            yield tokenize_event(line)
        elif ':' in line:
            yield tokenize_label(line)
def flatten(tokens):
    """Assemble one map's Object record from its tokenized label and events."""
    label = None
    warp_events = []
    object_events = []
    bg_events = []
    for token_name, args in tokens:
        # The first two args of every event are the x, y coordinates.
        location = lambda : list(map(number.parse, args[0:2]))
        if token_name == 'label':
            assert label is None
            # Fix: stored the literal string 'label' (the token tag) instead
            # of the label's name carried in `args`.
            label = args
        elif token_name == 'object_event':
            event = ObjectEvent(
                location(),
                *(args[2:])
            )
            object_events.append(event)
        elif token_name == 'warp_event':
            destination_map, destination_warp_id = args[2:]
            event = WarpEvent(
                location(),
                destination_map,
                number.parse(destination_warp_id)
            )
            warp_events.append(event)
        elif token_name == 'bg_event':
            event = BgEvent(
                location(),
                *(args[2:])
            )
            bg_events.append(event)
        else:
            assert False, (token_name, args)
    assert label is not None
    return Object(
        label=label,
        warp_events = warp_events,
        object_events = object_events,
        bg_events = bg_events,
    )
def parse(path):
    """Parse one data/maps/objects/*.asm file into an Object."""
    with open(path) as f:
        tokens = tokenize_lines(f.read().split('\n'))
        return flatten(tokens)
def parse_all(prefix):
    """Parse every file in data/maps/objects (order is filesystem-dependent)."""
    base_path = prefix / 'data/maps/objects'
    paths = (p for p in base_path.iterdir() if p.is_file())
    return [parse(path) for path in paths]

View File

@ -0,0 +1,43 @@
def tokenize_block(line, delim):
    """Tokenize `Name<delim> INCBIN "path"` into (name, path).

    Lines with no argument after the delimiter (or no delimiter at all)
    yield a (name,) 1-tuple; a trailing `;` comment is stripped in the
    no-delimiter case.
    """
    parts = line.split(delim)
    if len(parts) == 1:
        bare = parts[0]
        return (bare.split(';')[0].strip(),)
    name, args = parts
    args = args.strip()
    if not args:
        return (name,)
    _, quoted = args.split(' ')
    return name, quoted.strip('"')
def tokenize_lines(lines):
    """Yield block tokens for every `_Blocks:` label line of maps.asm."""
    for line in lines:
        if '_Blocks:' in line:
            yield tokenize_block(line, delim=':')
def flatten(tokens, endings, base_path):
    """Resolve label-only tokens to the next (name, path) token's path.

    Consecutive bare labels alias the same file. Every emitted name must end
    with one of *endings*, and every path must start with *base_path*.
    """
    pending = []
    for token in tokens:
        if len(token) == 2:
            name, path = token
            pending.append(name)
            for queued in pending:
                assert any(queued.endswith(e) for e in endings), (queued, endings)
                assert path.startswith(base_path), path
                yield queued, path
            pending = []
        elif len(token) == 1:
            pending.append(token[0])
        else:
            assert False, token
def parse(prefix):
    """Parse maps.asm into {label: blk-path}; asserts labels are unique."""
    with open(prefix / 'maps.asm') as f:
        tokens = tokenize_lines(f.read().split('\n'))
        l = list(flatten(tokens,
                         endings=['_Blocks'],
                         base_path='maps/'))
        d = dict(l)
        assert len(d) == len(l)  # no duplicate labels
        return d

5
tools/parse/number.py Normal file
View File

@ -0,0 +1,5 @@
def parse(n):
    """Parse an asm numeric literal: `$xx` is hexadecimal, otherwise decimal."""
    return int(n[1:], 16) if n.startswith('$') else int(n)

26
tools/parse/parse.py Normal file
View File

@ -0,0 +1,26 @@
# Driver module: parse everything needed out of a pokered checkout whose
# root is given as the first CLI argument. Results are bound to module-level
# names so other modules can `from parse import parse` and read them.
import sys
from pathlib import Path
from parse import map_header
from parse import maps_blocks
from parse import tileset_constants
from parse import tileset_headers
from parse import gfx_tilesets
from parse import collision_tile_ids
from parse import map_objects
from parse import hidden_objects
from parse import map_constants

# Root of the pokered source tree.
prefix = Path(sys.argv[1])

# All parsing happens at import time.
map_headers = map_header.parse_all(prefix)
maps_blocks_list = maps_blocks.parse(prefix)
tileset_constants_list = tileset_constants.parse(prefix)
tileset_headers_list = tileset_headers.parse(prefix)
gfx_tilesets_list = gfx_tilesets.parse(prefix)
# tileset coll
collision_tile_ids_list = collision_tile_ids.parse(prefix)
map_objects_list = map_objects.parse_all(prefix)
hidden_objects_list = hidden_objects.parse(prefix)
map_constants_list = map_constants.parse(prefix)

View File

@ -0,0 +1,19 @@
from parse.map_header import tokenize_line
def tokenize_lines(lines):
    """Yield tokens for every line mentioning `const`."""
    for line in lines:
        if 'const' in line:
            yield tokenize_line(line)
def flatten(tokens):
    """Assign sequential indices to `const` declarations, in file order."""
    const_names = (t[1][0] for t in tokens if t[0] == 'const')
    for index, name in enumerate(const_names):
        yield name, index
def parse(prefix):
    """Parse constants/tileset_constants.asm into {name: index}."""
    path = prefix / 'constants/tileset_constants.asm'
    with open(path) as f:
        return dict(flatten(tokenize_lines(f.read().split('\n'))))

View File

@ -0,0 +1,44 @@
from dataclasses import dataclass
from parse.map_header import tokenize_line
from parse import number
def tokenize_lines(lines):
    """Yield tokens for every `tileset` macro line (note the trailing space
    in the match, which skips labels containing 'tileset')."""
    for line in lines:
        if 'tileset ' in line:
            yield tokenize_line(line)
@dataclass
class TilesetHeader:
    name: str
    # Counter-tile ids; flatten() passes a 3-tuple of parsed ints
    # (annotation corrected from list[str]).
    counters: tuple[int, int, int]
    # Parsed numeric id of the grass tile (annotation corrected from str).
    grass_tile: int
    animations: str
    def blockset(self):
        # renamed from "block" to better disambiguate from Map blocks
        return f"{self.name}_Block"
    def gfx(self):
        return f"{self.name}_GFX"
    def coll(self):
        return f"{self.name}_Coll"
def flatten(tokens):
    """Build TilesetHeader records from `tileset` macro tokens."""
    for token in tokens:
        if token[0] != 'tileset':
            continue
        _, (name, c0, c1, c2, grass, anims) = token
        yield TilesetHeader(
            name=name,
            counters=tuple(map(number.parse, (c0, c1, c2))),
            grass_tile=number.parse(grass),
            animations=anims
        )
def parse(prefix):
    """Parse data/tilesets/tileset_headers.asm into a list of TilesetHeader."""
    path = prefix / 'data/tilesets/tileset_headers.asm'
    with open(path) as f:
        tokens = tokenize_lines(f.read().split('\n'))
        return list(flatten(tokens))

16
tools/parse/tilesets.py Normal file
View File

@ -0,0 +1,16 @@
from parse.maps_blocks import tokenize_block, flatten
def tokenize_lines(lines):
    """Yield block tokens for `_GFX:`/`_Block:` label lines of gfx/tilesets.asm."""
    for line in lines:
        if '_GFX:' in line or '_Block:' in line:
            yield tokenize_block(line, delim='::')
def parse(prefix):
    """Parse gfx/tilesets.asm into a list of (label, path) pairs.

    Unlike the dict-returning variant of this parser, duplicates are kept.
    """
    path = prefix / 'gfx/tilesets.asm'
    with open(path) as f:
        tokens = tokenize_lines(f.read().split('\n'))
        return list(
            flatten(tokens,
                    endings=['_GFX', '_Block'],
                    base_path='gfx/')
        )

39
tools/png_to_4bpp.py Normal file
View File

@ -0,0 +1,39 @@
import sys
from PIL import Image
def two_bpp_index(px):
    """Map a 4-level grayscale sample (0x00/0x55/0xaa/0xff) to its 2bpp
    palette index (0-3); asserts on any other value."""
    lookup = {0x00: 0, 0x55: 1, 0xaa: 2, 0xff: 3}
    assert px in lookup, px
    return lookup[px]
def convert(image):
    """Convert a grayscale (mode 'L') 4-level PNG into packed 4bpp cell data.

    Pixels are read in 8x8-cell order; two 4-bit palette indices are packed
    per output byte, high nibble first. Returns a bytearray of
    width*height/2 bytes.
    """
    assert image.mode == 'L', image.mode
    width, height = image.size
    buf = bytearray(width * height // 2)
    for cell_y in range(height//8):
        for cell_x in range(width//8):
            for y in range(8):
                for x in range(8):
                    # Fix: read from the `image` parameter; the original read
                    # the module-level `im`, silently ignoring its argument.
                    px = image.getpixel((cell_x * 8 + x, cell_y * 8 + y))
                    index = two_bpp_index(px)
                    # 4 bytes per 8-pixel row within a cell.
                    buf_ix = x//2 + (4 * (cell_x * 8 + (cell_y * width) + y))
                    buf[buf_ix] |= (index << 4 * (1 - (x % 2)))
    return buf
def debug(buf):
    """Dump *buf* as hex digits, 8 pixels (4 bytes) per row, with a blank
    line after every 8 rows (one cell)."""
    for row in range(len(buf) // 4):
        for byte in buf[row * 4:row * 4 + 4]:
            print((byte >> 4) & 0xf, end='')
            print(byte & 0xf, end='')
        print()
        if row % 8 == 7:
            print()
# CLI: python png_to_4bpp.py <input.png> <output.4bpp>
im = Image.open(sys.argv[1])
buf = convert(im)
with open(sys.argv[2], 'wb') as f:
    f.write(buf)