Compare commits

...

3 commits

Author  SHA1        Message                               Date
red     edcbfde351  holy carp it parses                   2025-06-07 21:16:05 -04:00
red     dc89e81cc5  ast + basic printer                   2025-06-07 21:12:44 -04:00
red     5385a7509c  fix string literal token value type   2025-06-07 21:12:26 -04:00
6 changed files with 362 additions and 7 deletions

53  ast.go  Normal file

@@ -0,0 +1,53 @@
package main

// todo: find something better than any here
// we can't use generics on either the visitor itself or on each individual
// method: adding a type parameter to the visitor infects every expr you
// use with it, and methods cannot have generic parameters
type exprVisitor interface {
    visitBinaryExpr(b *binaryExpr) any
    visitGroupingExpr(g *groupingExpr) any
    visitLiteralExpr(g *literalExpr) any
    visitUnaryExpr(g *unaryExpr) any
}

type expr interface {
    accept(v exprVisitor) any
}

type binaryExpr struct {
    Left     expr
    Operator *token
    Right    expr
}

func (b *binaryExpr) accept(v exprVisitor) any {
    return v.visitBinaryExpr(b)
}

type groupingExpr struct {
    Expr expr
}

func (g *groupingExpr) accept(v exprVisitor) any {
    return v.visitGroupingExpr(g)
}

type literalExpr struct {
    Value any
}

func (l *literalExpr) accept(v exprVisitor) any {
    return v.visitLiteralExpr(l)
}

type unaryExpr struct {
    Operator *token
    Right    expr
}

func (u *unaryExpr) accept(v exprVisitor) any {
    return v.visitUnaryExpr(u)
}
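
A note on the comment at the top of ast.go: Go methods cannot take type parameters, and parameterizing exprVisitor itself would force that parameter onto expr and every implementation of it. A free function can be generic, though, so a small hypothetical helper like the following (not part of these commits, assumed to live alongside the rest of package main) could centralize the type assertion on the any result:

// acceptAs is a hypothetical generic wrapper, not in this change set: free
// functions may take type parameters even though methods cannot, so callers
// can recover a typed result from the any-returning visitor in one place.
func acceptAs[T any](e expr, v exprVisitor) T {
    result, ok := e.accept(v).(T)
    if !ok {
        panic("visitor returned an unexpected type")
    }
    return result
}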

68  astprinter.go  Normal file

@@ -0,0 +1,68 @@
package main

import (
    "fmt"
    "strconv"
)

type astPrinter struct {
    result string
}

// visitBinaryExpr implements exprVisitor.
func (p *astPrinter) visitBinaryExpr(b *binaryExpr) any {
    return p.parenthesize(b.Operator.Lexeme, b.Left, b.Right)
}

// visitGroupingExpr implements exprVisitor.
func (p *astPrinter) visitGroupingExpr(g *groupingExpr) any {
    return p.parenthesize("group", g.Expr)
}

// visitLiteralExpr implements exprVisitor.
func (p *astPrinter) visitLiteralExpr(g *literalExpr) any {
    switch t := g.Value.(type) {
    case string:
        return t
    case bool:
        if t {
            return "true"
        }
        return "false"
    case float64:
        return strconv.FormatFloat(t, 'f', 3, 64)
    case nil:
        return "nil"
    }
    return fmt.Sprintf("%v", g.Value)
}

// visitUnaryExpr implements exprVisitor.
func (p *astPrinter) visitUnaryExpr(g *unaryExpr) any {
    return p.parenthesize(g.Operator.Lexeme, g.Right)
}

func (p *astPrinter) parenthesize(name string, expressions ...expr) string {
    val := "(" + name
    for _, e := range expressions {
        exprStr, ok := (e.accept(p)).(string)
        if !ok {
            panic("badly implemented visitor")
        }
        val += " " + exprStr
    }
    val += ")"
    return val
}

func (p *astPrinter) print(e expr) string {
    str, ok := (e.accept(p)).(string)
    if !ok {
        panic("badly implemented visitor")
    }
    return str
}
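
As a quick illustration (not part of the commits), the printer can be exercised on a hand-built tree. The token fields and tokenType constants used here are the ones shown elsewhere in this diff, and the float formatting follows FormatFloat(_, 'f', 3, 64):

// demoPrinter is a hypothetical snippet assumed to sit in package main: it
// builds the tree for -123 * (45.67) by hand and prints its parenthesized form.
func demoPrinter() {
    e := &binaryExpr{
        Left: &unaryExpr{
            Operator: &token{Type: tokenTypeMinus, Lexeme: "-"},
            Right:    &literalExpr{Value: 123.0},
        },
        Operator: &token{Type: tokenTypeStar, Lexeme: "*"},
        Right:    &groupingExpr{Expr: &literalExpr{Value: 45.67}},
    }
    fmt.Println((&astPrinter{}).print(e)) // prints: (* (- 123.000) (group 45.670))
}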

31  main.go

@@ -29,10 +29,10 @@ func runFile(filename string) {
     bs, err := os.ReadFile(filename)
     if err != nil {
         fmt.Printf("unable to read file '%s':\n\t%s", filename, err.Error())
-        os.Exit(1)
+        os.Exit(64)
     }
 
-    if run(string(bs)) {
+    if !run(string(bs)) {
         os.Exit(65)
     }
 }
@@ -59,20 +59,33 @@ func run(source string) bool {
     s := newScanner(source)
     tokens, ok := s.ScanTokens()
     if !ok {
-        return true
+        return false
     }
 
-    for _, token := range tokens {
-        fmt.Println(token)
-    }
-
-    return false
+    p := newParser(tokens)
+    expr, err := p.Parse()
+    if err != nil {
+        return false
+    }
+
+    printer := &astPrinter{}
+    fmt.Println(printer.print(expr))
+
+    return true
 }
 
 func reportErr(line int, message string) {
     report(line, "", message)
 }
 
+func reportSyntaxError(token *token, message string) {
+    if token.Type == tokenTypeEOF {
+        report(token.Line, "at EOF", message)
+    } else {
+        report(token.Line, "at \""+token.Lexeme+"\"", message)
+    }
+}
+
 func report(line int, where, message string) {
     fmt.Printf("[line %d] Error%s: %s\n", line, where, message)
 }
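
The hunks above add reportSyntaxError but never show a caller; presumably run() will surface parse failures through it. A hedged sketch of that wiring (not part of this diff), assuming the parseError interface defined in parser.go below:

// reportParseError is a hypothetical helper: it shows how a parse failure
// could be routed through the new reportSyntaxError using the Token()
// accessor that parseError exposes.
func reportParseError(err error) {
    if pe, ok := err.(parseError); ok {
        reportSyntaxError(pe.Token(), pe.Error())
        return
    }
    fmt.Println(err)
}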

201  parser.go  Normal file

@@ -0,0 +1,201 @@
package main

type parseError interface {
    Error() string
    Token() *token
}

type syntaxError struct {
    token   *token
    message string
}

func newSyntaxError(token *token, message string) *syntaxError {
    return &syntaxError{token, message}
}

func (s *syntaxError) Token() *token {
    return s.token
}

func (s *syntaxError) Error() string {
    return s.message
}

type operandFunc func() (expr, error)

type parser struct {
    tokens  []*token
    current int
}

func newParser(tokens []*token) *parser {
    return &parser{
        tokens:  tokens,
        current: 0,
    }
}

func (p *parser) Parse() (expr, error) {
    e, err := p.expression()
    if err != nil {
        return e, err
    }
    return e, err
}

// expression -> equality
func (p *parser) expression() (expr, error) {
    return p.equality()
}

func (p *parser) parseLeftAssocBinOps(operand operandFunc, tokenTypes ...tokenType) (expr, error) {
    e, err := operand()
    if err != nil {
        return e, err
    }
    for p.match(tokenTypes...) {
        op := p.previous()
        r, err := operand()
        if err != nil {
            return e, err
        }
        e = &binaryExpr{e, op, r}
    }
    return e, nil
}

// equality -> comparison ( ( "!=" | "==" ) comparison )*
func (p *parser) equality() (expr, error) {
    return p.parseLeftAssocBinOps(
        p.comparison,
        tokenTypeBangEq, tokenTypeEqualEqual,
    )
}

// comparison -> term ( ( ">" | ">=" | "<" | "<=" ) term )*
func (p *parser) comparison() (expr, error) {
    return p.parseLeftAssocBinOps(
        p.term,
        tokenTypeGreater, tokenTypeGreaterEq,
        tokenTypeLess, tokenTypeLessEq,
    )
}

// term -> factor ( ( "-" | "+" ) factor )*
func (p *parser) term() (expr, error) {
    return p.parseLeftAssocBinOps(
        p.factor,
        tokenTypeMinus, tokenTypePlus,
    )
}

// factor -> unary ( ( "*" | "/" ) unary )*
func (p *parser) factor() (expr, error) {
    return p.parseLeftAssocBinOps(
        p.unary,
        tokenTypeSlash, tokenTypeStar,
    )
}

// unary -> ( "!" | "-" ) unary | primary
func (p *parser) unary() (expr, error) {
    if p.match(tokenTypeBang, tokenTypeMinus) {
        op := p.previous()
        r, err := p.unary()
        return &unaryExpr{op, r}, err
    }
    return p.primary()
}

// primary -> STRING | NUMBER | "true" | "false" | "nil" | "(" expression ")"
func (p *parser) primary() (expr, error) {
    if p.match(tokenTypeTrue) {
        return &literalExpr{true}, nil
    }
    if p.match(tokenTypeFalse) {
        return &literalExpr{false}, nil
    }
    if p.match(tokenTypeNil) {
        return &literalExpr{nil}, nil
    }
    if p.match(tokenTypeString, tokenTypeNumber) {
        return &literalExpr{p.previous().Literal}, nil
    }
    if p.match(tokenTypeLeftParen) {
        e, err := p.expression()
        if err != nil {
            return nil, err
        }
        _, err = p.consume(tokenTypeRightParen, "expected ')' after expression")
        return &groupingExpr{e}, err
    }
    return nil, newSyntaxError(p.peek(), "expected expression")
}

func (p *parser) consume(tokenType tokenType, msg string) (*token, error) {
    if p.check(tokenType) {
        return p.advance(), nil
    }
    return nil, newSyntaxError(p.peek(), msg)
}

func (p *parser) synchronize() {
    p.advance()
    for !p.isAtEnd() {
        if p.previous().Type == tokenTypeSemicolon {
            return
        }
        if isKeyword(p.peek()) {
            return
        }
        p.advance()
    }
}

func (p *parser) match(tokenTypes ...tokenType) bool {
    for _, t := range tokenTypes {
        if p.check(t) {
            p.advance()
            return true
        }
    }
    return false
}

func (p *parser) check(t tokenType) bool {
    if p.isAtEnd() {
        return false
    }
    return p.peek().Type == t
}

func (p *parser) advance() *token {
    if !p.isAtEnd() {
        p.current += 1
    }
    return p.previous()
}

func (p *parser) isAtEnd() bool {
    return p.peek().Type == tokenTypeEOF
}

func (p *parser) peek() *token {
    return p.tokens[p.current]
}

func (p *parser) previous() *token {
    return p.tokens[p.current-1]
}
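
For reference, a small hedged sketch (not part of the diff) of the full scan, parse, and print pipeline that run() now performs, assuming ScanTokens behaves as used in main.go and that number literals scan to float64. The expected output shows the precedence and left associativity that the parseLeftAssocBinOps chain produces:

// demoParse is a hypothetical helper that feeds a few sources through the
// same pipeline as run() and prints the resulting trees.
func demoParse() {
    for _, src := range []string{"1 + 2 * 3", "1 - 2 - 3", "-(4 > 5) == false"} {
        s := newScanner(src)
        tokens, ok := s.ScanTokens()
        if !ok {
            continue // the scanner already reported its error
        }
        e, err := newParser(tokens).Parse()
        if err != nil {
            continue // a parseError; run() returns false in this case
        }
        fmt.Println((&astPrinter{}).print(e))
    }
    // Expected output, given FormatFloat(_, 'f', 3, 64):
    //   (+ 1.000 (* 2.000 3.000))                factor binds tighter than term
    //   (- (- 1.000 2.000) 3.000)                left-associative chain
    //   (== (- (group (> 4.000 5.000))) false)   unary, grouping, then equality
}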

scanner.go

@@ -222,7 +222,7 @@ func (s *scanner) string() bool {
     // todo: escape sequences
     value := s.source[s.start+1 : s.current-1]
-    s.addToken(tokenTypeString, value)
+    s.addToken(tokenTypeString, string(value))
     return false
 }

token.go

@@ -59,6 +59,17 @@ const (
     tokenTypeEOF
 )
 
+var keywordTokenTypes = []tokenType{
+    tokenTypeClass,
+    tokenTypeFun,
+    tokenTypeVar,
+    tokenTypeFor,
+    tokenTypeIf,
+    tokenTypeReturn,
+    tokenTypeWhile,
+    tokenTypePrint,
+}
+
 type token struct {
     Type   tokenType
     Lexeme string
@@ -69,3 +80,12 @@ type token struct {
 func (t token) String() string {
     return fmt.Sprintf("%s %s %+v", t.Type, t.Lexeme, t.Literal)
 }
+
+func isKeyword(token *token) bool {
+    for _, kt := range keywordTokenTypes {
+        if token.Type == kt {
+            return true
+        }
+    }
+    return false
+}
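
A possible variation, not in the commit: since tokenType is comparable, the keyword lookup could use a map (or a switch) instead of scanning the slice. A sketch:

// keywordSet is a hypothetical alternative to the slice walked by isKeyword;
// membership tests become a single map lookup.
var keywordSet = map[tokenType]struct{}{
    tokenTypeClass: {}, tokenTypeFun: {}, tokenTypeVar: {}, tokenTypeFor: {},
    tokenTypeIf: {}, tokenTypeReturn: {}, tokenTypeWhile: {}, tokenTypePrint: {},
}

func isKeywordType(t tokenType) bool {
    _, ok := keywordSet[t]
    return ok
}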