// Package parser implements a recursive-descent parser that turns a Lox
// token stream into an expression AST.
package parser
import (
"git.red-panda.pet/pandaware/lox-go/ast"
"git.red-panda.pet/pandaware/lox-go/lexer"
)
// ParseError is the error contract produced by the parser: an error
// message plus the token at which the error was detected.
type ParseError interface {
	Error() string
	Token() *lexer.Token
}
// SyntaxError is the concrete ParseError implementation: it records the
// offending token and a human-readable message.
type SyntaxError struct {
	token   *lexer.Token
	message string
}
// newSyntaxError builds a SyntaxError pointing at the given token with
// the given message.
func newSyntaxError(token *lexer.Token, message string) *SyntaxError {
	return &SyntaxError{token: token, message: message}
}
// Token returns the token at which the syntax error was detected.
func (s *SyntaxError) Token() *lexer.Token {
	return s.token
}
// Error returns the human-readable message, satisfying the error interface.
func (s *SyntaxError) Error() string {
	return s.message
}
type operandFunc func() (ast.Expr, error)
// Parser is a recursive-descent parser over a token stream.
type Parser struct {
	tokens []*lexer.Token
	// current is the index of the next unconsumed token.
	current int
}
// New returns a Parser positioned at the first of tokens. The stream is
// expected to be terminated by an EOF token (isAtEnd relies on it).
func New(tokens []*lexer.Token) *Parser {
	// current's zero value already points at the first token.
	return &Parser{tokens: tokens}
}
// Parse parses the token stream as a single expression and returns the
// resulting AST. On a syntax error the returned expression is nil and the
// error describes the offending token.
func (p *Parser) Parse() (ast.Expr, error) {
	e, err := p.expression()
	if err != nil {
		// Don't hand callers a partially built tree alongside an error.
		return nil, err
	}
	return e, nil
}
// expression -> equality
//
// expression is the lowest-precedence rule; with only expressions in the
// grammar so far it simply delegates to equality.
func (p *Parser) expression() (ast.Expr, error) {
	return p.equality()
}
// parseLeftAssocBinOps parses a left-associative chain of binary
// operators: operand ( op operand )* where op is any of tokenTypes.
// Each matched operator folds the expression built so far into the left
// side of a new BinaryExpr, which is what makes the result
// left-associative. On error it returns nil rather than a partial tree.
func (p *Parser) parseLeftAssocBinOps(operand operandFunc, tokenTypes ...lexer.TokenType) (ast.Expr, error) {
	e, err := operand()
	if err != nil {
		return nil, err
	}
	for p.match(tokenTypes...) {
		op := p.previous()
		r, err := operand()
		if err != nil {
			return nil, err
		}
		e = &ast.BinaryExpr{Left: e, Op: op, Right: r}
	}
	return e, nil
}
// equality -> comparison ( ( "!=" | "==" ) comparison )*
func (p *Parser) equality() (ast.Expr, error) {
	return p.parseLeftAssocBinOps(
		p.comparison,
		lexer.TokenTypeBangEq, lexer.TokenTypeEqualEqual,
	)
}
// comparison -> term ( ( ">" | ">=" | "<" | "<=" ) term )*
func (p *Parser) comparison() (ast.Expr, error) {
	return p.parseLeftAssocBinOps(
		p.term,
		lexer.TokenTypeGreater, lexer.TokenTypeGreaterEq,
		lexer.TokenTypeLess, lexer.TokenTypeLessEq,
	)
}
// term -> factor ( ( "-" | "+" ) factor )*
func (p *Parser) term() (ast.Expr, error) {
	return p.parseLeftAssocBinOps(
		p.factor,
		lexer.TokenTypeMinus, lexer.TokenTypePlus,
	)
}
// factor -> unary ( ( "*" | "/" ) unary )*
func (p *Parser) factor() (ast.Expr, error) {
	return p.parseLeftAssocBinOps(
		p.unary,
		lexer.TokenTypeSlash, lexer.TokenTypeStar,
	)
}
// unary -> ( "!" | "-" ) unary | primary
//
// unary parses a (possibly nested) prefix operator expression, falling
// through to primary when no prefix operator is present.
func (p *Parser) unary() (ast.Expr, error) {
	if p.match(lexer.TokenTypeBang, lexer.TokenTypeMinus) {
		op := p.previous()
		r, err := p.unary()
		if err != nil {
			// Previously a nil operand was wrapped in a UnaryExpr and
			// returned alongside the error; never expose half-built nodes.
			return nil, err
		}
		return &ast.UnaryExpr{Op: op, Right: r}, nil
	}
	return p.primary()
}
// primary -> STRING | NUMBER | "true" | "false" | "nil" | "(" expression ")"
//
// primary parses a literal or a parenthesized expression; any other token
// is reported as "expected expression".
func (p *Parser) primary() (ast.Expr, error) {
	if p.match(lexer.TokenTypeTrue) {
		return &ast.LiteralExpr{Value: true}, nil
	}
	if p.match(lexer.TokenTypeFalse) {
		return &ast.LiteralExpr{Value: false}, nil
	}
	if p.match(lexer.TokenTypeNil) {
		return &ast.LiteralExpr{Value: nil}, nil
	}
	if p.match(lexer.TokenTypeString, lexer.TokenTypeNumber) {
		// match consumed the token, so the literal lives on previous().
		return &ast.LiteralExpr{Value: p.previous().Literal}, nil
	}
	if p.match(lexer.TokenTypeLeftParen) {
		e, err := p.expression()
		if err != nil {
			return nil, err
		}
		if _, err := p.consume(lexer.TokenTypeRightParen, "expected ')' after expression"); err != nil {
			// Previously the GroupingExpr was returned together with the
			// error; return nil so callers never see a half-built node.
			return nil, err
		}
		return &ast.GroupingExpr{Expr: e}, nil
	}
	return nil, newSyntaxError(p.peek(), "expected expression")
}
// consume advances past the next token and returns it when it has the
// requested type; otherwise it leaves the cursor alone and reports a
// syntax error built from msg at the current token.
func (p *Parser) consume(tokenType lexer.TokenType, msg string) (*lexer.Token, error) {
	if !p.check(tokenType) {
		return nil, newSyntaxError(p.peek(), msg)
	}
	return p.advance(), nil
}
// synchronize discards tokens until a likely statement boundary — just
// after a ";" or just before a keyword — so parsing can resume after a
// syntax error without cascading spurious diagnostics.
func (p *Parser) synchronize() {
	for p.advance(); !p.isAtEnd(); p.advance() {
		if p.previous().Type == lexer.TokenTypeSemicolon || lexer.IsKeyword(p.peek()) {
			return
		}
	}
}
// match reports whether the next token has any of the given types,
// consuming it when it does and leaving the cursor untouched otherwise.
func (p *Parser) match(tokenTypes ...lexer.TokenType) bool {
	for _, tt := range tokenTypes {
		if !p.check(tt) {
			continue
		}
		p.advance()
		return true
	}
	return false
}
// check reports whether the next token has type t without consuming it.
// It is always false once the cursor has reached EOF.
func (p *Parser) check(t lexer.TokenType) bool {
	return !p.isAtEnd() && p.peek().Type == t
}
// advance consumes the current token and returns it. At end of input the
// cursor stays put and the most recently consumed token is returned.
func (p *Parser) advance() *lexer.Token {
	if p.isAtEnd() {
		return p.previous()
	}
	p.current++
	return p.previous()
}
// isAtEnd reports whether the cursor has reached the terminating EOF token.
func (p *Parser) isAtEnd() bool {
	return p.peek().Type == lexer.TokenTypeEOF
}
// peek returns the next unconsumed token without advancing the cursor.
func (p *Parser) peek() *lexer.Token {
	return p.tokens[p.current]
}
// previous returns the most recently consumed token. Calling it before
// anything has been consumed (current == 0) would index out of range.
func (p *Parser) previous() *lexer.Token {
	return p.tokens[p.current-1]
}