diff --git a/ast.go b/ast.go
deleted file mode 100644
index 1ed396d..0000000
--- a/ast.go
+++ /dev/null
@@ -1,53 +0,0 @@
-package main
-
-// todo: find something better than any here
-// we can't use generics on either the visitor itself or
-// each individual method because the adding it to the
-// visitor itself infects every expr you use with it
-// and methods cannot have generic parameters
-
-type exprVisitor interface {
-	visitBinaryExpr(b *binaryExpr) any
-	visitGroupingExpr(g *groupingExpr) any
-	visitLiteralExpr(g *literalExpr) any
-	visitUnaryExpr(g *unaryExpr) any
-}
-
-type expr interface {
-	accept(v exprVisitor) any
-}
-
-type binaryExpr struct {
-	Left     expr
-	Operator *token
-	Right    expr
-}
-
-func (b *binaryExpr) accept(v exprVisitor) any {
-	return v.visitBinaryExpr(b)
-}
-
-type groupingExpr struct {
-	Expr expr
-}
-
-func (g *groupingExpr) accept(v exprVisitor) any {
-	return v.visitGroupingExpr(g)
-}
-
-type literalExpr struct {
-	Value any
-}
-
-func (l *literalExpr) accept(v exprVisitor) any {
-	return v.visitLiteralExpr(l)
-}
-
-type unaryExpr struct {
-	Operator *token
-	Right    expr
-}
-
-func (u *unaryExpr) accept(v exprVisitor) any {
-	return v.visitUnaryExpr(u)
-}
diff --git a/astprinter.go b/astprinter.go
deleted file mode 100644
index 2237513..0000000
--- a/astprinter.go
+++ /dev/null
@@ -1,68 +0,0 @@
-package main
-
-import (
-	"fmt"
-	"strconv"
-)
-
-type astPrinter struct {
-	result string
-}
-
-// visitBinaryExpr implements exprVisitor.
-func (p *astPrinter) visitBinaryExpr(b *binaryExpr) any {
-	return p.parenthesize(b.Operator.Lexeme, b.Left, b.Right)
-}
-
-// visitGroupingExpr implements exprVisitor.
-func (p *astPrinter) visitGroupingExpr(g *groupingExpr) any {
-	return p.parenthesize("group", g.Expr)
-}
-
-// visitLiteralExpr implements exprVisitor.
-func (p *astPrinter) visitLiteralExpr(g *literalExpr) any {
-	switch t := g.Value.(type) {
-	case string:
-		return t
-	case bool:
-		if t {
-			return "true"
-		}
-		return "false"
-	case float64:
-		return strconv.FormatFloat(t, 'f', 3, 64)
-	case nil:
-		return "nil"
-	}
-
-	return fmt.Sprintf("%v", g.Value)
-}
-
-// visitUnaryExpr implements exprVisitor.
-func (p *astPrinter) visitUnaryExpr(g *unaryExpr) any {
-	return p.parenthesize(g.Operator.Lexeme, g.Right)
-}
-
-func (p *astPrinter) parenthesize(name string, expressions ...expr) string {
-	val := "(" + name
-
-	for _, e := range expressions {
-		exprStr, ok := (e.accept(p)).(string)
-		if !ok {
-			panic("badly implemented visitor")
-		}
-		val += " " + exprStr
-	}
-
-	val += ")"
-	return val
-
-}
-
-func (p *astPrinter) print(e expr) string {
-	str, ok := (e.accept(p)).(string)
-	if !ok {
-		panic("badly implemented visitor")
-	}
-	return str
-}
diff --git a/main.go b/main.go
index dc72579..99531fd 100644
--- a/main.go
+++ b/main.go
@@ -29,10 +29,10 @@ func runFile(filename string) {
 	bs, err := os.ReadFile(filename)
 	if err != nil {
 		fmt.Printf("unable to read file '%s':\n\t%s", filename, err.Error())
-		os.Exit(64)
+		os.Exit(1)
 	}
 
-	if !run(string(bs)) {
+	if run(string(bs)) {
 		os.Exit(65)
 	}
 }
@@ -59,33 +59,20 @@ func run(source string) bool {
 	s := newScanner(source)
 	tokens, ok := s.ScanTokens()
 	if !ok {
-		return false
+		return true
 	}
 
-	p := newParser(tokens)
-	expr, err := p.Parse()
-	if err != nil {
-		return false
+	for _, token := range tokens {
+		fmt.Println(token)
 	}
 
-	printer := &astPrinter{}
-	fmt.Println(printer.print(expr))
-
-	return true
+	return false
 }
 
 func reportErr(line int, message string) {
 	report(line, "", message)
 }
 
-func reportSyntaxError(token *token, message string) {
-	if token.Type == tokenTypeEOF {
-		report(token.Line, "at EOF", message)
-	} else {
-		report(token.Line, "at \""+token.Lexeme+"\"", message)
-	}
-}
-
 func report(line int, where, message string) {
 	fmt.Printf("[line %d] Error%s: %s\n", line, where, message)
 }
diff --git a/parser.go b/parser.go
deleted file mode 100644
index e1ce6b2..0000000
--- a/parser.go
+++ /dev/null
@@ -1,201 +0,0 @@
-package main
-
-type parseError interface {
-	Error() string
-	Token() *token
-}
-
-type syntaxError struct {
-	token   *token
-	message string
-}
-
-func newSyntaxError(token *token, message string) *syntaxError {
-	return &syntaxError{token, message}
-}
-
-func (s *syntaxError) Token() *token {
-	return s.token
-}
-
-func (s *syntaxError) Error() string {
-	return s.message
-}
-
-type operandFunc func() (expr, error)
-
-type parser struct {
-	tokens  []*token
-	current int
-}
-
-func newParser(tokens []*token) *parser {
-	return &parser{
-		tokens:  tokens,
-		current: 0,
-	}
-}
-
-func (p *parser) Parse() (expr, error) {
-	e, err := p.expression()
-	if err != nil {
-		return e, err
-	}
-
-	return e, err
-}
-
-// expression -> equality
-func (p *parser) expression() (expr, error) {
-	return p.equality()
-}
-
-func (p *parser) parseLeftAssocBinOps(operand operandFunc, tokenTypes ...tokenType) (expr, error) {
-	e, err := operand()
-	if err != nil {
-		return e, err
-	}
-
-	for p.match(tokenTypes...) {
-		op := p.previous()
-		r, err := operand()
-		if err != nil {
-			return e, err
-		}
-		e = &binaryExpr{e, op, r}
-	}
-
-	return e, nil
-}
-
-// eqality -> comparison ( ("!=" | "==") comparison )*
-func (p *parser) equality() (expr, error) {
-	return p.parseLeftAssocBinOps(
-		p.comparison,
-		tokenTypeBangEq, tokenTypeEqualEqual,
-	)
-}
-
-// comparison -> term ( ( ">" | ">=" | "<" | "<=" ) term )*
-func (p *parser) comparison() (expr, error) {
-	return p.parseLeftAssocBinOps(
-		p.term,
-		tokenTypeGreater, tokenTypeGreaterEq,
-		tokenTypeLess, tokenTypeLessEq,
-	)
-}
-
-// term -> factor ( ( "-" | "+" ) factor )*
-func (p *parser) term() (expr, error) {
-	return p.parseLeftAssocBinOps(
-		p.factor,
-		tokenTypeMinus, tokenTypePlus,
-	)
-}
-
-// factor -> unary ( ( "*" | "/" ) unary )*
-func (p *parser) factor() (expr, error) {
-	return p.parseLeftAssocBinOps(
-		p.unary,
-		tokenTypeSlash, tokenTypeStar,
-	)
-}
-
-// unary -> ( "!" | "-" ) unary | primary;
-func (p *parser) unary() (expr, error) {
-	if p.match(tokenTypeBang, tokenTypeMinus) {
-		op := p.previous()
-		r, err := p.unary()
-		return &unaryExpr{op, r}, err
-	}
-
-	return p.primary()
-}
-
-// primary -> STRING | NUMBER | "true" | "false" | "nil" | "(" expression ")"
-func (p *parser) primary() (expr, error) {
-	if p.match(tokenTypeTrue) {
-		return &literalExpr{true}, nil
-	}
-	if p.match(tokenTypeFalse) {
-		return &literalExpr{false}, nil
-	}
-	if p.match(tokenTypeNil) {
-		return &literalExpr{nil}, nil
-	}
-
-	if p.match(tokenTypeString, tokenTypeNumber) {
-		return &literalExpr{p.previous().Literal}, nil
-	}
-
-	if p.match(tokenTypeLeftParen) {
-		e, err := p.expression()
-		if err != nil {
-			return nil, err
-		}
-		_, err = p.consume(tokenTypeRightParen, "expected ')' after expression")
-		return &groupingExpr{e}, err
-	}
-
-	return nil, newSyntaxError(p.peek(), "expected expression")
-}
-
-func (p *parser) consume(tokenType tokenType, msg string) (*token, error) {
-	if p.check(tokenType) {
-		return p.advance(), nil
-	}
-
-	return nil, newSyntaxError(p.peek(), msg)
-}
-
-func (p *parser) synchronize() {
-	p.advance()
-
-	for !p.isAtEnd() {
-		if p.previous().Type == tokenTypeSemicolon {
-			return
-		}
-
-		if isKeyword(p.peek()) {
-			return
-		}
-		p.advance()
-	}
-}
-
-func (p *parser) match(tokenTypes ...tokenType) bool {
-	for _, t := range tokenTypes {
-		if p.check(t) {
-			p.advance()
-			return true
-		}
-	}
-
-	return false
-}
-
-func (p *parser) check(t tokenType) bool {
-	if p.isAtEnd() {
-		return false
-	}
-	return p.peek().Type == t
-}
-
-func (p *parser) advance() *token {
-	if !p.isAtEnd() {
-		p.current += 1
-	}
-	return p.previous()
-}
-
-func (p *parser) isAtEnd() bool {
-	return p.peek().Type == tokenTypeEOF
-}
-
-func (p *parser) peek() *token {
-	return p.tokens[p.current]
-}
-
-func (p *parser) previous() *token {
-	return p.tokens[p.current-1]
-}
diff --git a/scanner.go b/scanner.go
index 4801ade..54d35bc 100644
--- a/scanner.go
+++ b/scanner.go
@@ -222,7 +222,7 @@ func (s *scanner) string() bool {
 
 	// todo: escape sequences
 	value := s.source[s.start+1 : s.current-1]
-	s.addToken(tokenTypeString, string(value))
+	s.addToken(tokenTypeString, value)
 
 	return false
 }
diff --git a/tokentype.go b/tokentype.go
index 2c2c529..fc11673 100644
--- a/tokentype.go
+++ b/tokentype.go
@@ -59,17 +59,6 @@ const (
 	tokenTypeEOF
 )
 
-var keywordTokenTypes = []tokenType{
-	tokenTypeClass,
-	tokenTypeFun,
-	tokenTypeVar,
-	tokenTypeFor,
-	tokenTypeIf,
-	tokenTypeReturn,
-	tokenTypeWhile,
-	tokenTypePrint,
-}
-
 type token struct {
 	Type    tokenType
 	Lexeme  string
@@ -80,12 +69,3 @@ type token struct {
 func (t token) String() string {
 	return fmt.Sprintf("%s %s %+v", t.Type, t.Lexeme, t.Literal)
 }
-
-func isKeyword(token *token) bool {
-	for _, kt := range keywordTokenTypes {
-		if token.Type == kt {
-			return true
-		}
-	}
-	return false
-}
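
Note on the removed code path: before this change, run() wired the scanner, the parser and the astPrinter together (the lines deleted from main.go above). A minimal sketch of that flow, assuming ast.go, astprinter.go and parser.go are restored as shown in the deletions above, and that number literals are stored as float64 (the case visitLiteralExpr handles); printExpr is a hypothetical helper in package main, not part of the original code:

	// printExpr mirrors the lines removed from run(): scan, parse, then
	// pretty-print the resulting AST with the removed astPrinter.
	func printExpr(source string) {
		s := newScanner(source)
		tokens, ok := s.ScanTokens()
		if !ok {
			return
		}
		p := newParser(tokens)
		e, err := p.Parse()
		if err != nil {
			return
		}
		printer := &astPrinter{}
		// For "1 + 2 * 3" this prints "(+ 1.000 (* 2.000 3.000))":
		// factor() binds tighter than term(), so the "*" subexpression
		// nests inside the right operand of "+".
		fmt.Println(printer.print(e))
	}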