Compare commits
2 Commits
e24b3ada5e
...
93ccdf2c4a
| Author | SHA1 | Date | |
|---|---|---|---|
| 93ccdf2c4a | |||
| d903115964 |
75
regs/assembler/__main__.py
Normal file
75
regs/assembler/__main__.py
Normal file
@ -0,0 +1,75 @@
|
|||||||
|
import sys
|
||||||
|
|
||||||
|
from assembler.lexer import Lexer, LexerError
|
||||||
|
from assembler.parser import Parser, ParserError
|
||||||
|
from assembler.emitter import emit_instruction
|
||||||
|
|
||||||
|
sample = b"""
|
||||||
|
temp[0].xyzw = VE_ADD const[1].xyzw const[1].0000 const[1].0000
|
||||||
|
temp[1].xyzw = VE_ADD const[1].xyzw const[1].0000 const[1].0000
|
||||||
|
temp[0].x = VE_MAD const[0].x___ temp[1].x___ temp[0].y___
|
||||||
|
temp[0].x = VE_FRC temp[0].x___ temp[0].0000 temp[0].0000
|
||||||
|
temp[0].x = VE_MAD temp[0].x___ const[1].z___ const[1].w___
|
||||||
|
temp[0].y = ME_COS temp[0].xxxx temp[0].0000 temp[0].0000
|
||||||
|
temp[0].x = ME_SIN temp[0].xxxx temp[0].0000 temp[0].0000
|
||||||
|
temp[0].yz = VE_MUL input[0]._xy_ temp[0]._yy_ temp[0].0000
|
||||||
|
out[0].xz = VE_MAD input[0].-y-_-0-_ temp[0].x_0_ temp[0].y_0_
|
||||||
|
out[0].yw = VE_MAD input[0]._x_0 temp[0]._x_0 temp[0]._z_1
|
||||||
|
"""
|
||||||
|
|
||||||
|
def frontend_inner(buf):
    """Assemble *buf*: lex, parse, and emit machine words per instruction.

    Yields (codewords, (start_ix, end_ix)) pairs, where the index pair
    delimits the instruction's span inside *buf*.
    """
    tokens = list(Lexer(buf).lex_tokens())
    for instruction, span in Parser(tokens).instructions():
        yield list(emit_instruction(instruction)), span
|
||||||
|
|
||||||
|
def print_error(filename, buf, e):
    """Print a colorized, compiler-style diagnostic for *e* to stderr.

    *e* must carry exactly (message, token) in ``e.args``; *buf* is the
    source bytes the token's line/col/lexeme index into.
    """
    assert len(e.args) == 2, e
    message, token = e.args
    lines = buf.splitlines()
    line = lines[token.line - 1]

    error_name = str(type(e).__name__)
    col_indent = ' ' * token.col
    col_pointer = '^' * len(token.lexeme)
    RED = "\033[0;31m"
    DEFAULT = "\033[0;0m"
    # Bug fix: report the actual file name (the parameter was previously
    # ignored and the literal "(unknown)" was printed).
    print(f'File: "{filename}", line {token.line}, column {token.col}\n', file=sys.stderr)
    sys.stderr.write(' ')
    wrote_default = False
    for i, c in enumerate(line.decode('utf-8')):
        # Bug fix: reset the color *before* writing the first character past
        # the lexeme, so the highlight covers exactly the token (previously
        # one extra character was colored red).
        if i == token.col + len(token.lexeme):
            wrote_default = True
            sys.stderr.write(DEFAULT)
        if i == token.col:
            sys.stderr.write(RED)
        sys.stderr.write(c)
    if not wrote_default:
        # Token ran to the end of the line; still terminate the escape.
        sys.stderr.write(DEFAULT)
    sys.stderr.write('\n')
    print(f" {RED}{col_indent}{col_pointer}{DEFAULT}", file=sys.stderr)
    print(f'{RED}{error_name}{DEFAULT}: {message}', file=sys.stderr)
|
||||||
|
|
||||||
|
def frontend(filename, buf):
    """Assemble *buf*, reporting lexer/parser errors against *filename*.

    Yields the same (codewords, span) pairs as frontend_inner; on a
    LexerError or ParserError, prints a diagnostic and re-raises.
    """
    try:
        yield from frontend_inner(buf)
    except (ParserError, LexerError) as e:
        # Bug fix: previously referenced the module-level `input_filename`
        # instead of the `filename` parameter (NameError when this module is
        # used as a library); also merges the two identical except arms.
        print_error(filename, buf, e)
        raise
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    input_filename = sys.argv[1]
    #output_filename = sys.argv[2]

    # Debug toggle: when True, append each instruction's source text as a
    # trailing comment.  Was a hard-coded `if True:` that left the
    # source-echo branch unreachable; behavior with the default is unchanged.
    PRINT_SOURCE = False

    with open(input_filename, 'rb') as f:
        buf = f.read()

    output = list(frontend(input_filename, buf))

    for cw, (start_ix, end_ix) in output:
        if PRINT_SOURCE:
            source = buf[start_ix:end_ix]
            print(f"0x{cw[0]:08x}, 0x{cw[1]:08x}, 0x{cw[2]:08x}, 0x{cw[3]:08x}, // {source.decode('utf-8')}")
        else:
            print(f"0x{cw[0]:08x}, 0x{cw[1]:08x}, 0x{cw[2]:08x}, 0x{cw[3]:08x},")
|
||||||
112
regs/assembler/emitter.py
Normal file
112
regs/assembler/emitter.py
Normal file
@ -0,0 +1,112 @@
|
|||||||
|
from assembler.keywords import ME, VE, KW
|
||||||
|
from assembler.parser import Instruction, DestinationOp, Source
|
||||||
|
import pvs_dst
|
||||||
|
import pvs_src
|
||||||
|
import pvs_dst_bits
|
||||||
|
import pvs_src_bits
|
||||||
|
|
||||||
|
def we_x(s):
    """1 if component x (index 0) is in write-enable set *s*, else 0."""
    return 1 if 0 in s else 0


def we_y(s):
    """1 if component y (index 1) is in write-enable set *s*, else 0."""
    return 1 if 1 in s else 0


def we_z(s):
    """1 if component z (index 2) is in write-enable set *s*, else 0."""
    return 1 if 2 in s else 0


def we_w(s):
    """1 if component w (index 3) is in write-enable set *s*, else 0."""
    return 1 if 3 in s else 0
|
||||||
|
|
||||||
|
def dst_reg_type(kw):
    """Map a destination-type keyword to its PVS_DST_REG register-type code."""
    names = {
        KW.temporary: "TEMPORARY",
        KW.a0: "A0",
        KW.out: "OUT",
        KW.out_repl_x: "OUT_REPL_X",
        KW.alt_temporary: "ALT_TEMPORARY",
        KW.input: "INPUT",
    }
    # Same failure mode as the original if/elif chain: AssertionError
    # carrying the offending keyword.
    assert kw in names, kw
    return pvs_dst_bits.PVS_DST_REG_gen[names[kw]]
|
||||||
|
|
||||||
|
def emit_destination_op(dst_op: DestinationOp):
    """Yield the 32-bit destination word encoding *dst_op*."""
    assert type(dst_op.opcode) in {ME, VE}
    # MATH_INST distinguishes the math engine (ME) from the vector engine (VE).
    math_inst = int(type(dst_op.opcode) is ME)
    we = dst_op.write_enable
    word = pvs_dst.OPCODE_gen(dst_op.opcode.value)
    word |= pvs_dst.MATH_INST_gen(math_inst)
    word |= pvs_dst.REG_TYPE_gen(dst_reg_type(dst_op.type))
    word |= pvs_dst.OFFSET_gen(dst_op.offset)
    word |= pvs_dst.WE_X_gen(we_x(we))
    word |= pvs_dst.WE_Y_gen(we_y(we))
    word |= pvs_dst.WE_Z_gen(we_z(we))
    word |= pvs_dst.WE_W_gen(we_w(we))
    yield word
|
||||||
|
|
||||||
|
def src_reg_type(kw):
    """Map a source-type keyword to its PVS_SRC_REG_TYPE register code."""
    names = {
        KW.temporary: "PVS_SRC_REG_TEMPORARY",
        KW.input: "PVS_SRC_REG_INPUT",
        KW.constant: "PVS_SRC_REG_CONSTANT",
        KW.alt_temporary: "PVS_SRC_REG_ALT_TEMPORARY",
    }
    # Same failure mode as the original if/elif chain: AssertionError
    # carrying the offending keyword.
    assert kw in names, kw
    return pvs_src_bits.PVS_SRC_REG_TYPE_gen[names[kw]]
|
||||||
|
|
||||||
|
def emit_source(src: Source, prev: Source):
    """Yield the 32-bit word encoding source operand *src*.

    A missing operand (src is None) borrows *prev*'s register type and
    offset, selects the "unused" swizzle code (7) for every component, and
    clears all modifiers — matching what the hardware ignores anyway.
    """
    if src is None:
        assert prev is not None
        reg_kw, offset = prev.type, prev.offset
        selects = (7, 7, 7, 7)
        modifiers = (0, 0, 0, 0)
    else:
        reg_kw, offset = src.type, src.offset
        selects = src.swizzle.select
        modifiers = tuple(int(m) for m in src.swizzle.modifier)
    yield (
        pvs_src.REG_TYPE_gen(src_reg_type(reg_kw))
        | pvs_src.OFFSET_gen(offset)
        | pvs_src.SWIZZLE_X_gen(selects[0])
        | pvs_src.SWIZZLE_Y_gen(selects[1])
        | pvs_src.SWIZZLE_Z_gen(selects[2])
        | pvs_src.SWIZZLE_W_gen(selects[3])
        | pvs_src.MODIFIER_X_gen(modifiers[0])
        | pvs_src.MODIFIER_Y_gen(modifiers[1])
        | pvs_src.MODIFIER_Z_gen(modifiers[2])
        | pvs_src.MODIFIER_W_gen(modifiers[3])
    )
|
||||||
|
|
||||||
|
def prev_source(ins, ix):
    """Return the operand whose fields back-fill source *ix* of *ins*.

    Source 0 must always be present; sources 1 and 2 fall back to the
    nearest earlier operand that exists.  Raises AssertionError (carrying
    *ix*) for any index outside 0..2.
    """
    assert 0 <= ix <= 2, ix
    if ix == 0:
        assert ins.source0 is not None
        return ins.source0
    if ix == 2 and ins.source1 is not None:
        return ins.source1
    return ins.source0
|
||||||
|
|
||||||
|
def emit_instruction(ins: Instruction):
    """Yield the four 32-bit words encoding *ins*.

    One destination word, then three source words; absent sources are
    back-filled from prev_source.
    """
    yield from emit_destination_op(ins.destination_op)
    operands = (ins.source0, ins.source1, ins.source2)
    for ix, src in enumerate(operands):
        yield from emit_source(src, prev_source(ins, ix))
|
||||||
@ -3,7 +3,7 @@ from enum import Enum, auto
|
|||||||
from itertools import chain
|
from itertools import chain
|
||||||
from typing import Union
|
from typing import Union
|
||||||
|
|
||||||
import keywords
|
from assembler import keywords
|
||||||
|
|
||||||
DEBUG = True
|
DEBUG = True
|
||||||
|
|
||||||
@ -21,6 +21,7 @@ class TT(Enum):
|
|||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class Token:
|
class Token:
|
||||||
|
start_ix: int
|
||||||
line: int
|
line: int
|
||||||
col: int
|
col: int
|
||||||
type: TT
|
type: TT
|
||||||
@ -64,7 +65,7 @@ class Lexer:
|
|||||||
return self.buf[self.current_ix]
|
return self.buf[self.current_ix]
|
||||||
|
|
||||||
def pos(self):
|
def pos(self):
|
||||||
return self.line, self.col - (self.current_ix - self.start_ix)
|
return self.start_ix, self.line, self.col - (self.current_ix - self.start_ix)
|
||||||
|
|
||||||
def identifier(self):
|
def identifier(self):
|
||||||
while not self.at_end_p() and self.peek() in identifier_characters:
|
while not self.at_end_p() and self.peek() in identifier_characters:
|
||||||
@ -96,7 +97,7 @@ class Lexer:
|
|||||||
elif c == ord('.'):
|
elif c == ord('.'):
|
||||||
return Token(*self.pos(), TT.dot, self.lexeme())
|
return Token(*self.pos(), TT.dot, self.lexeme())
|
||||||
elif c == ord(';'):
|
elif c == ord(';'):
|
||||||
while not at_end_p() and peek() != ord('\n'):
|
while not self.at_end_p() and self.peek() != ord('\n'):
|
||||||
self.advance()
|
self.advance()
|
||||||
elif c == ord(' ') or c == ord('\r') or c == ord('\t'):
|
elif c == ord(' ') or c == ord('\r') or c == ord('\t'):
|
||||||
pass
|
pass
|
||||||
|
|||||||
@ -1,15 +1,16 @@
|
|||||||
import lexer
|
|
||||||
from lexer import TT
|
|
||||||
from keywords import KW, ME, VE
|
|
||||||
from itertools import pairwise
|
from itertools import pairwise
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from typing import Union
|
from typing import Union
|
||||||
|
|
||||||
|
from assembler import lexer
|
||||||
|
from assembler.lexer import TT, Token
|
||||||
|
from assembler.keywords import KW, ME, VE
|
||||||
|
|
||||||
"""
|
"""
|
||||||
temp[0].xyzw = VE_ADD const[1].xyzw const[1].0000 const[1].0000
|
temp[0].xyzw = VE_ADD const[1].xyzw const[1].0000 const[1].0000
|
||||||
temp[1].xyzw = VE_ADD const[1].xyzw const[1].0000 const[1].0000
|
temp[1].xyzw = VE_ADD const[1].xyzw const[1].0000 const[1].0000
|
||||||
temp[0].x = VE_MAD const[0].x___ temp[1].x___ temp[0].y___
|
temp[0].x = VE_MAD const[0].x___ temp[1].x___ temp[0].y___
|
||||||
temp[0].x = VE_FRAC temp[0].x___ temp[0].0000 temp[0].0000
|
temp[0].x = VE_FRC temp[0].x___ temp[0].0000 temp[0].0000
|
||||||
temp[0].x = VE_MAD temp[0].x___ const[1].z___ const[1].w___
|
temp[0].x = VE_MAD temp[0].x___ const[1].z___ const[1].w___
|
||||||
temp[0].y = ME_COS temp[0].xxxx temp[0].0000 temp[0].0000
|
temp[0].y = ME_COS temp[0].xxxx temp[0].0000 temp[0].0000
|
||||||
temp[0].x = ME_SIN temp[0].xxxx temp[0].0000 temp[0].0000
|
temp[0].x = ME_SIN temp[0].xxxx temp[0].0000 temp[0].0000
|
||||||
@ -43,7 +44,7 @@ class Instruction:
|
|||||||
source1: Source
|
source1: Source
|
||||||
source2: Source
|
source2: Source
|
||||||
|
|
||||||
class ParseError(Exception):
|
class ParserError(Exception):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def identifier_to_number(token):
|
def identifier_to_number(token):
|
||||||
@ -51,18 +52,24 @@ def identifier_to_number(token):
|
|||||||
|
|
||||||
assert token.type is TT.identifier
|
assert token.type is TT.identifier
|
||||||
if not all(d in digits for d in token.lexeme):
|
if not all(d in digits for d in token.lexeme):
|
||||||
raise ParseError("expected number", token)
|
raise ParserError("expected number", token)
|
||||||
return int(bytes(token.lexeme), 10)
|
return int(bytes(token.lexeme), 10)
|
||||||
|
|
||||||
|
def we_ord(c):
    """Map a component letter's byte value to its index: x→0, y→1, z→2, w→3."""
    return 3 if c == ord("w") else c - ord("x")
|
||||||
|
|
||||||
def parse_dest_write_enable(token):
|
def parse_dest_write_enable(token):
|
||||||
we_chars = set(b"xyzw")
|
we_chars = set(b"xyzw")
|
||||||
assert token.type is TT.identifier
|
assert token.type is TT.identifier
|
||||||
we = bytes(token.lexeme).lower()
|
we = bytes(token.lexeme).lower()
|
||||||
if not all(c in we_chars for c in we):
|
if not all(c in we_chars for c in we):
|
||||||
raise ParseError("expected destination write enable", token)
|
raise ParserError("expected destination write enable", token)
|
||||||
if not all(a < b for a, b in pairwise(we)) or len(set(we)) != len(we):
|
if not all(we_ord(a) < we_ord(b) for a, b in pairwise(we)) or len(set(we)) != len(we):
|
||||||
raise ParseError("misleading non-sequential write enable", token)
|
raise ParserError("misleading non-sequential write enable", token)
|
||||||
return set(c - ord('x') for c in we)
|
return set(we_ord(c) for c in we)
|
||||||
|
|
||||||
def parse_source_swizzle(token):
|
def parse_source_swizzle(token):
|
||||||
select_mapping = {
|
select_mapping = {
|
||||||
@ -82,25 +89,25 @@ def parse_source_swizzle(token):
|
|||||||
swizzle_modifiers = [None] * 4
|
swizzle_modifiers = [None] * 4
|
||||||
lexeme = bytes(token.lexeme).lower()
|
lexeme = bytes(token.lexeme).lower()
|
||||||
while state < 4:
|
while state < 4:
|
||||||
if ix > len(token.lexeme):
|
if ix >= len(token.lexeme):
|
||||||
raise ParseError("invalid source swizzle", token)
|
raise ParserError("invalid source swizzle", token)
|
||||||
c = lexeme[ix]
|
c = lexeme[ix]
|
||||||
if c == ord('-'):
|
if c == ord('-'):
|
||||||
if (swizzle_modifiers[state] is not None) or (swizzle_selects[state] is not None):
|
if (swizzle_modifiers[state] is not None) or (swizzle_selects[state] is not None):
|
||||||
raise ParseError("invalid source swizzle modifier", token)
|
raise ParserError("invalid source swizzle modifier", token)
|
||||||
swizzle_modifiers[state] = True
|
swizzle_modifiers[state] = True
|
||||||
elif c in select_mapping:
|
elif c in select_mapping:
|
||||||
if swizzle_selects[state] is not None:
|
if swizzle_selects[state] is not None:
|
||||||
raise ParseError("invalid source swizzle select", token)
|
raise ParserError("invalid source swizzle select", token)
|
||||||
swizzle_selects[state] = select_mapping[c]
|
swizzle_selects[state] = select_mapping[c]
|
||||||
if swizzle_modifiers[state] is None:
|
if swizzle_modifiers[state] is None:
|
||||||
swizzle_modifiers[state] = False
|
swizzle_modifiers[state] = False
|
||||||
state += 1
|
state += 1
|
||||||
else:
|
else:
|
||||||
raise ParseError("invalid source swizzle", token)
|
raise ParserError("invalid source swizzle", token)
|
||||||
ix += 1
|
ix += 1
|
||||||
if ix != len(lexeme):
|
if ix != len(lexeme):
|
||||||
raise ParseError("invalid source swizzle", token)
|
raise ParserError("invalid source swizzle", token)
|
||||||
return SourceSwizzle(swizzle_selects, swizzle_modifiers)
|
return SourceSwizzle(swizzle_selects, swizzle_modifiers)
|
||||||
|
|
||||||
class Parser:
|
class Parser:
|
||||||
@ -108,8 +115,10 @@ class Parser:
|
|||||||
self.current_ix = 0
|
self.current_ix = 0
|
||||||
self.tokens = tokens
|
self.tokens = tokens
|
||||||
|
|
||||||
def peek(self):
|
def peek(self, offset=0):
|
||||||
return self.tokens[self.current_ix]
|
token = self.tokens[self.current_ix + offset]
|
||||||
|
#print(token)
|
||||||
|
return token
|
||||||
|
|
||||||
def at_end_p(self):
|
def at_end_p(self):
|
||||||
return self.peek().type == TT.eof
|
return self.peek().type == TT.eof
|
||||||
@ -119,36 +128,27 @@ class Parser:
|
|||||||
self.current_ix += 1
|
self.current_ix += 1
|
||||||
return token
|
return token
|
||||||
|
|
||||||
def match(self, token_type, message):
|
def match(self, token_type):
|
||||||
token = self.advance()
|
token = self.peek()
|
||||||
return token.type == token_type
|
return token.type == token_type
|
||||||
|
|
||||||
def consume(self, token_type, message):
|
def consume(self, token_type, message):
|
||||||
token = self.advance()
|
token = self.advance()
|
||||||
if token.type != token_type:
|
if token.type != token_type:
|
||||||
raise ParseError(message, token)
|
raise ParserError(message, token)
|
||||||
return token
|
return token
|
||||||
|
|
||||||
def consume_either(self, token_type1, token_type2, message):
|
def consume_either(self, token_type1, token_type2, message):
|
||||||
token = self.advance()
|
token = self.advance()
|
||||||
if token.type != token_type1 and token.type != token_type2:
|
if token.type != token_type1 and token.type != token_type2:
|
||||||
raise ParseError(message, token)
|
raise ParserError(message, token)
|
||||||
return token
|
return token
|
||||||
|
|
||||||
|
|
||||||
"""
|
|
||||||
def consume_keyword(self, keyword, message):
|
|
||||||
token = self.consume(TT.keyword, message)
|
|
||||||
assert token.keyword is not None
|
|
||||||
if token.keyword != keyword:
|
|
||||||
raise ParseError(message, token)
|
|
||||||
"""
|
|
||||||
|
|
||||||
def destination_type(self):
|
def destination_type(self):
|
||||||
token = self.consume(TT.keyword, "expected destination type")
|
token = self.consume(TT.keyword, "expected destination type")
|
||||||
destination_keywords = {KW.temporary, KW.a0, KW.out, KW.out_repl_x, KW.alt_temporary, KW.input}
|
destination_keywords = {KW.temporary, KW.a0, KW.out, KW.out_repl_x, KW.alt_temporary, KW.input}
|
||||||
if token.keyword not in destination_keywords:
|
if token.keyword not in destination_keywords:
|
||||||
raise ParseError("expected destination type", token)
|
raise ParserError("expected destination type", token)
|
||||||
return token.keyword
|
return token.keyword
|
||||||
|
|
||||||
def offset(self):
|
def offset(self):
|
||||||
@ -161,7 +161,7 @@ class Parser:
|
|||||||
def opcode(self):
|
def opcode(self):
|
||||||
token = self.consume(TT.keyword, "expected opcode")
|
token = self.consume(TT.keyword, "expected opcode")
|
||||||
if type(token.keyword) != VE and type(token.keyword) != ME:
|
if type(token.keyword) != VE and type(token.keyword) != ME:
|
||||||
raise ParseError("expected opcode", token)
|
raise ParserError("expected opcode", token)
|
||||||
return token.keyword
|
return token.keyword
|
||||||
|
|
||||||
def destination_op(self):
|
def destination_op(self):
|
||||||
@ -178,7 +178,7 @@ class Parser:
|
|||||||
token = self.consume(TT.keyword, "expected source type")
|
token = self.consume(TT.keyword, "expected source type")
|
||||||
source_keywords = {KW.temporary, KW.input, KW.constant, KW.alt_temporary}
|
source_keywords = {KW.temporary, KW.input, KW.constant, KW.alt_temporary}
|
||||||
if token.keyword not in source_keywords:
|
if token.keyword not in source_keywords:
|
||||||
raise ParseError("expected source type", token)
|
raise ParserError("expected source type", token)
|
||||||
return token.keyword
|
return token.keyword
|
||||||
|
|
||||||
def source_swizzle(self):
|
def source_swizzle(self):
|
||||||
@ -194,15 +194,32 @@ class Parser:
|
|||||||
return Source(source_type, offset, source_swizzle)
|
return Source(source_type, offset, source_swizzle)
|
||||||
|
|
||||||
def instruction(self):
|
def instruction(self):
|
||||||
|
while self.match(TT.eol):
|
||||||
|
self.advance()
|
||||||
|
first_token = self.peek()
|
||||||
destination_op = self.destination_op()
|
destination_op = self.destination_op()
|
||||||
source0 = self.source()
|
source0 = self.source()
|
||||||
|
if self.match(TT.eol) or self.match(TT.eof):
|
||||||
|
source1 = None
|
||||||
|
else:
|
||||||
source1 = self.source()
|
source1 = self.source()
|
||||||
|
if self.match(TT.eol) or self.match(TT.eof):
|
||||||
|
source2 = None
|
||||||
|
else:
|
||||||
source2 = self.source()
|
source2 = self.source()
|
||||||
|
last_token = self.peek(-1)
|
||||||
self.consume_either(TT.eol, TT.eof, "expected newline or EOF")
|
self.consume_either(TT.eol, TT.eof, "expected newline or EOF")
|
||||||
return Instruction(destination_op, source0, source1, source2)
|
return (
|
||||||
|
Instruction(destination_op, source0, source1, source2),
|
||||||
|
(first_token.start_ix, last_token.start_ix + len(last_token.lexeme))
|
||||||
|
)
|
||||||
|
|
||||||
|
def instructions(self):
|
||||||
|
while not self.match(TT.eof):
|
||||||
|
yield self.instruction()
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
from lexer import Lexer
|
from assembler.lexer import Lexer
|
||||||
buf = b"out[0].xz = VE_MAD input[0].-y-_-0-_ temp[0].x_0_ temp[0].y_0_"
|
buf = b"out[0].xz = VE_MAD input[0].-y-_-0-_ temp[0].x_0_ temp[0].y_0_"
|
||||||
lexer = Lexer(buf)
|
lexer = Lexer(buf)
|
||||||
tokens = list(lexer.lex_tokens())
|
tokens = list(lexer.lex_tokens())
|
||||||
|
|||||||
@ -67,12 +67,15 @@ def low_from_bits(bits):
|
|||||||
return bits
|
return bits
|
||||||
|
|
||||||
def generate_python(prefix, fields):
|
def generate_python(prefix, fields):
|
||||||
#out(0, f"class {prefix}:")
|
|
||||||
fields = list(fields)
|
fields = list(fields)
|
||||||
for field_name, bits, description in fields:
|
for field_name, bits, description in fields:
|
||||||
#out(1, f"@staticmethod")
|
|
||||||
out(0, f"def {field_name}(n):")
|
out(0, f"def {field_name}(n):")
|
||||||
out(1, f"return (n >> {low_from_bits(bits)}) & {mask_from_bits(bits)}")
|
out(1, f"return (n >> {low_from_bits(bits)}) & {hex(mask_from_bits(bits))}")
|
||||||
|
out(0, "")
|
||||||
|
|
||||||
|
out(0, f"def {field_name}_gen(n):")
|
||||||
|
out(1, f"assert ({hex(mask_from_bits(bits))} & n) == n, (n, {hex(mask_from_bits(bits))})")
|
||||||
|
out(1, f"return n << {low_from_bits(bits)}")
|
||||||
out(0, "")
|
out(0, "")
|
||||||
|
|
||||||
out(0, "table = [")
|
out(0, "table = [")
|
||||||
|
|||||||
@ -27,4 +27,5 @@ while ix < len(lines):
|
|||||||
print(f' {value.strip()}: "{key.strip()}",')
|
print(f' {value.strip()}: "{key.strip()}",')
|
||||||
ix += 1
|
ix += 1
|
||||||
print("}")
|
print("}")
|
||||||
|
print(f"{name}_gen = dict((v, k) for k, v in {name}.items())")
|
||||||
print()
|
print()
|
||||||
|
|||||||
@ -2,113 +2,9 @@ import pvs_src
|
|||||||
import pvs_src_bits
|
import pvs_src_bits
|
||||||
import pvs_dst
|
import pvs_dst
|
||||||
import pvs_dst_bits
|
import pvs_dst_bits
|
||||||
from pprint import pprint
|
|
||||||
import itertools
|
import itertools
|
||||||
from functools import partial
|
from functools import partial
|
||||||
|
import sys
|
||||||
code = [
|
|
||||||
0x00f00203,
|
|
||||||
0x00d10001,
|
|
||||||
0x01248001,
|
|
||||||
0x01248001,
|
|
||||||
]
|
|
||||||
|
|
||||||
# Radeon Compiler Program
|
|
||||||
# 0: MOV output[1].xyz, input[1].xyz_;
|
|
||||||
# 1: MOV output[0], input[0].xyz1;
|
|
||||||
# Final vertex program code:
|
|
||||||
# 0: op: 0x00702203 dst: 1o op: VE_ADD
|
|
||||||
# src0: 0x01d10021 reg: 1i swiz: X/ Y/ Z/ U
|
|
||||||
# src1: 0x01248021 reg: 1i swiz: 0/ 0/ 0/ 0
|
|
||||||
# src2: 0x01248021 reg: 1i swiz: 0/ 0/ 0/ 0
|
|
||||||
# 1: op: 0x00f00203 dst: 0o op: VE_ADD
|
|
||||||
# src0: 0x01510001 reg: 0i swiz: X/ Y/ Z/ 1
|
|
||||||
# src1: 0x01248001 reg: 0i swiz: 0/ 0/ 0/ 0
|
|
||||||
# src2: 0x01248001 reg: 0i swiz: 0/ 0/ 0/ 0
|
|
||||||
code = [
|
|
||||||
0x00702203,
|
|
||||||
0x01d10021,
|
|
||||||
0x01248021,
|
|
||||||
0x01248021,
|
|
||||||
0x00f00203,
|
|
||||||
0x01510001,
|
|
||||||
0x01248001,
|
|
||||||
0x01248001,
|
|
||||||
]
|
|
||||||
|
|
||||||
code = [
|
|
||||||
0x00f00003,
|
|
||||||
0x00d10022,
|
|
||||||
0x01248022,
|
|
||||||
0x01248022,
|
|
||||||
0x00f02003,
|
|
||||||
0x00d10022,
|
|
||||||
0x01248022,
|
|
||||||
0x01248022,
|
|
||||||
0x00100004,
|
|
||||||
0x01ff0002,
|
|
||||||
0x01ff0020,
|
|
||||||
0x01ff2000,
|
|
||||||
0x00100006,
|
|
||||||
0x01ff0000,
|
|
||||||
0x01248000,
|
|
||||||
0x01248000,
|
|
||||||
0x00100004,
|
|
||||||
0x01ff0000,
|
|
||||||
0x01ff4022,
|
|
||||||
0x01ff6022,
|
|
||||||
0x00100050,
|
|
||||||
0x00000000,
|
|
||||||
0x01248000,
|
|
||||||
0x01248000,
|
|
||||||
0x00f00204,
|
|
||||||
0x0165a000,
|
|
||||||
0x01690001,
|
|
||||||
0x01240000,
|
|
||||||
]
|
|
||||||
|
|
||||||
code = [
|
|
||||||
0x00f00003,
|
|
||||||
0x00d10022,
|
|
||||||
0x01248022,
|
|
||||||
0x01248022,
|
|
||||||
0x00f02003,
|
|
||||||
0x00d10022,
|
|
||||||
0x01248022,
|
|
||||||
0x01248022,
|
|
||||||
0x00100004,
|
|
||||||
0x01ff0002,
|
|
||||||
0x01ff0020,
|
|
||||||
0x01ff2000,
|
|
||||||
0x00100006,
|
|
||||||
0x01ff0000,
|
|
||||||
0x01248000,
|
|
||||||
0x01248000,
|
|
||||||
0x00100004,
|
|
||||||
0x01ff0000,
|
|
||||||
0x01ff4022,
|
|
||||||
0x01ff6022,
|
|
||||||
0x00200051,
|
|
||||||
0x00000000,
|
|
||||||
0x01248000,
|
|
||||||
0x01248000,
|
|
||||||
0x00100050,
|
|
||||||
0x00000000,
|
|
||||||
0x01248000,
|
|
||||||
0x01248000,
|
|
||||||
0x00600002,
|
|
||||||
0x01c8e001,
|
|
||||||
0x01c9e000,
|
|
||||||
0x01248000,
|
|
||||||
0x00500204,
|
|
||||||
0x1fe72001,
|
|
||||||
0x01e70000,
|
|
||||||
0x01e72000,
|
|
||||||
0x00a00204,
|
|
||||||
0x0138e001,
|
|
||||||
0x0138e000,
|
|
||||||
0x017ae000,
|
|
||||||
]
|
|
||||||
|
|
||||||
def out(level, *args):
|
def out(level, *args):
|
||||||
sys.stdout.write(" " * level + " ".join(args))
|
sys.stdout.write(" " * level + " ".join(args))
|
||||||
@ -151,8 +47,6 @@ def parse_code(code):
|
|||||||
|
|
||||||
ix += 4
|
ix += 4
|
||||||
|
|
||||||
#parse_code(code)
|
|
||||||
|
|
||||||
def dst_swizzle_from_we(dst_op):
|
def dst_swizzle_from_we(dst_op):
|
||||||
table = [
|
table = [
|
||||||
(pvs_dst.WE_X, "x"),
|
(pvs_dst.WE_X, "x"),
|
||||||
@ -280,5 +174,14 @@ def parse_instruction(instruction):
|
|||||||
|
|
||||||
print(dst.ljust(12), "=", op.ljust(9), " ".join(map(lambda s: s.ljust(17), rest)))
|
print(dst.ljust(12), "=", op.ljust(9), " ".join(map(lambda s: s.ljust(17), rest)))
|
||||||
|
|
||||||
for i in range(len(code) // 4):
|
def parse_hex(s):
    """Parse a '0x'-prefixed hexadecimal string into an int."""
    assert s.startswith('0x')
    # int() accepts the 0x prefix itself when base 16 is given explicitly.
    return int(s, 16)
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Read comma-separated 0x-prefixed words from the named file and
    # disassemble them four at a time (destination + three sources).
    filename = sys.argv[1]
    with open(filename) as f:
        buf = f.read()
    code = [parse_hex(field.strip()) for field in buf.split(',') if field.strip()]
    for i in range(len(code) // 4):
        parse_instruction(code[i * 4:i * 4 + 4])
|
||||||
|
|||||||
@ -4,6 +4,7 @@ PVS_SRC_REG_TYPE = {
|
|||||||
2: "PVS_SRC_REG_CONSTANT",
|
2: "PVS_SRC_REG_CONSTANT",
|
||||||
3: "PVS_SRC_REG_ALT_TEMPORARY",
|
3: "PVS_SRC_REG_ALT_TEMPORARY",
|
||||||
}
|
}
|
||||||
|
PVS_SRC_REG_TYPE_gen = dict((v, k) for k, v in PVS_SRC_REG_TYPE.items())
|
||||||
|
|
||||||
PVS_SRC_SWIZZLE_SEL = {
|
PVS_SRC_SWIZZLE_SEL = {
|
||||||
0: "PVS_SRC_SELECT_X",
|
0: "PVS_SRC_SELECT_X",
|
||||||
@ -13,10 +14,12 @@ PVS_SRC_SWIZZLE_SEL = {
|
|||||||
4: "PVS_SRC_SELECT_FORCE_0",
|
4: "PVS_SRC_SELECT_FORCE_0",
|
||||||
5: "PVS_SRC_SELECT_FORCE_1",
|
5: "PVS_SRC_SELECT_FORCE_1",
|
||||||
}
|
}
|
||||||
|
PVS_SRC_SWIZZLE_SEL_gen = dict((v, k) for k, v in PVS_SRC_SWIZZLE_SEL.items())
|
||||||
|
|
||||||
PVS_SRC_ADDR_MODE = {
|
PVS_SRC_ADDR_MODE = {
|
||||||
0: "Absolute addressing",
|
0: "Absolute addressing",
|
||||||
1: "Relative addressing using A0 register",
|
1: "Relative addressing using A0 register",
|
||||||
2: "Relative addressing using I0 register",
|
2: "Relative addressing using I0 register",
|
||||||
}
|
}
|
||||||
|
PVS_SRC_ADDR_MODE_gen = dict((v, k) for k, v in PVS_SRC_ADDR_MODE.items())
|
||||||
|
|
||||||
|
|||||||
Loading…
x
Reference in New Issue
Block a user