From e0dd8ff9d50a7020e95209c7c82fe6a82daa1b8d Mon Sep 17 00:00:00 2001 From: red Date: Sun, 8 Jun 2025 21:18:17 -0400 Subject: [PATCH] wrote dsl for ast boiler plate generation --- ast/_ast.go | 55 +++++++++ ast/ast.go | 54 +-------- ast/astprinter.go | 4 +- ast/expr.ast | 20 ++++ ast/expr.go | 62 +++++++++++ ast/gen/debug.go | 55 +++++++++ ast/gen/generate.go | 52 +++++++++ ast/gen/lex.go | 180 ++++++++++++++++++++++++++++++ ast/gen/parser.go | 229 ++++++++++++++++++++++++++++++++++++++ ast/gen/transpile.go | 146 ++++++++++++++++++++++++ lexer/tokentype_string.go | 61 ++++++++++ parser/parser.go | 4 +- repl/main.go | 2 + run.go | 20 +++- runtime/interpreter.go | 4 +- runtime/loxtype_string.go | 27 +++++ 16 files changed, 915 insertions(+), 60 deletions(-) create mode 100644 ast/_ast.go create mode 100644 ast/expr.ast create mode 100644 ast/expr.go create mode 100644 ast/gen/debug.go create mode 100644 ast/gen/generate.go create mode 100644 ast/gen/lex.go create mode 100644 ast/gen/parser.go create mode 100644 ast/gen/transpile.go create mode 100644 lexer/tokentype_string.go create mode 100644 runtime/loxtype_string.go diff --git a/ast/_ast.go b/ast/_ast.go new file mode 100644 index 0000000..73d925f --- /dev/null +++ b/ast/_ast.go @@ -0,0 +1,55 @@ +package ast + +import "git.red-panda.pet/pandaware/lox-go/lexer" + +// todo: find something better than any here +// we can't use generics on either the visitor itself or +// each individual method because the adding it to the +// visitor itself infects every expr you use with it +// and methods cannot have generic parameters + +type ExprVisitor interface { + VisitBinaryExpr(b *BinaryExpr) any + VisitGroupingExpr(g *GroupingExpr) any + VisitLiteralExpr(g *LiteralExpr) any + VisitUnaryExpr(g *UnaryExpr) any +} + +type Expr interface { + Accept(v ExprVisitor) any +} + +type BinaryExpr struct { + Left Expr + Operator *lexer.Token + Right Expr +} + +func (b *BinaryExpr) Accept(v ExprVisitor) any { + return v.VisitBinaryExpr(b) +} + +type GroupingExpr struct { + Expr Expr +} + +func (g *GroupingExpr) Accept(v ExprVisitor) any { + return v.VisitGroupingExpr(g) +} + +type LiteralExpr struct { + Value any +} + +func (l *LiteralExpr) Accept(v ExprVisitor) any { + return v.VisitLiteralExpr(l) +} + +type UnaryExpr struct { + Operator *lexer.Token + Right Expr +} + +func (u *UnaryExpr) Accept(v ExprVisitor) any { + return v.VisitUnaryExpr(u) +} diff --git a/ast/ast.go b/ast/ast.go index 73d925f..986f514 100644 --- a/ast/ast.go +++ b/ast/ast.go @@ -1,55 +1,3 @@ package ast -import "git.red-panda.pet/pandaware/lox-go/lexer" - -// todo: find something better than any here -// we can't use generics on either the visitor itself or -// each individual method because the adding it to the -// visitor itself infects every expr you use with it -// and methods cannot have generic parameters - -type ExprVisitor interface { - VisitBinaryExpr(b *BinaryExpr) any - VisitGroupingExpr(g *GroupingExpr) any - VisitLiteralExpr(g *LiteralExpr) any - VisitUnaryExpr(g *UnaryExpr) any -} - -type Expr interface { - Accept(v ExprVisitor) any -} - -type BinaryExpr struct { - Left Expr - Operator *lexer.Token - Right Expr -} - -func (b *BinaryExpr) Accept(v ExprVisitor) any { - return v.VisitBinaryExpr(b) -} - -type GroupingExpr struct { - Expr Expr -} - -func (g *GroupingExpr) Accept(v ExprVisitor) any { - return v.VisitGroupingExpr(g) -} - -type LiteralExpr struct { - Value any -} - -func (l *LiteralExpr) Accept(v ExprVisitor) any { - return v.VisitLiteralExpr(l) -} - -type 
UnaryExpr struct { - Operator *lexer.Token - Right Expr -} - -func (u *UnaryExpr) Accept(v ExprVisitor) any { - return v.VisitUnaryExpr(u) -} +//go:generate go run ./gen -f expr.ast diff --git a/ast/astprinter.go b/ast/astprinter.go index 3ebd326..f22477e 100644 --- a/ast/astprinter.go +++ b/ast/astprinter.go @@ -12,7 +12,7 @@ type Printer struct { var _ ExprVisitor = new(Printer) func (p *Printer) VisitBinaryExpr(b *BinaryExpr) any { - return p.Parenthesize(b.Operator.Lexeme, b.Left, b.Right) + return p.Parenthesize(b.Op.Lexeme, b.Left, b.Right) } func (p *Printer) VisitGroupingExpr(g *GroupingExpr) any { @@ -38,7 +38,7 @@ func (p *Printer) VisitLiteralExpr(g *LiteralExpr) any { } func (p *Printer) VisitUnaryExpr(g *UnaryExpr) any { - return p.Parenthesize(g.Operator.Lexeme, g.Right) + return p.Parenthesize(g.Op.Lexeme, g.Right) } func (p *Printer) Parenthesize(name string, expressions ...Expr) string { diff --git a/ast/expr.ast b/ast/expr.ast new file mode 100644 index 0000000..d492abc --- /dev/null +++ b/ast/expr.ast @@ -0,0 +1,20 @@ +#Expr + +Binary [ + Left = Expr; + Op = *lexer.Token; + Right = Expr; +] + +Grouping [ + Expr = Expr; +] + +Literal [ + Value = any; +] + +Unary [ + Op = *lexer.Token; + Right = Expr; +] diff --git a/ast/expr.go b/ast/expr.go new file mode 100644 index 0000000..c5a81c6 --- /dev/null +++ b/ast/expr.go @@ -0,0 +1,62 @@ +package ast + +// THIS FILE WAS AUTOMATICALLY GENERATED, DO NOT MANUALLY EDIT + +import "git.red-panda.pet/pandaware/lox-go/lexer" + +type ExprVisitor interface { + VisitBinaryExpr(v *BinaryExpr) any + VisitGroupingExpr(v *GroupingExpr) any + VisitLiteralExpr(v *LiteralExpr) any + VisitUnaryExpr(v *UnaryExpr) any +} + +type Expr interface { + Accept(v ExprVisitor) any +} + +type BinaryExpr struct { + Left Expr + + Op *lexer.Token + + Right Expr +} + +func (n *BinaryExpr) Accept(v ExprVisitor) any { + return v.VisitBinaryExpr(n) +} + +var _ Expr = new(BinaryExpr) + +type GroupingExpr struct { + Expr Expr +} + +func (n *GroupingExpr) Accept(v ExprVisitor) any { + return v.VisitGroupingExpr(n) +} + +var _ Expr = new(GroupingExpr) + +type LiteralExpr struct { + Value any +} + +func (n *LiteralExpr) Accept(v ExprVisitor) any { + return v.VisitLiteralExpr(n) +} + +var _ Expr = new(LiteralExpr) + +type UnaryExpr struct { + Op *lexer.Token + + Right Expr +} + +func (n *UnaryExpr) Accept(v ExprVisitor) any { + return v.VisitUnaryExpr(n) +} + +var _ Expr = new(UnaryExpr) diff --git a/ast/gen/debug.go b/ast/gen/debug.go new file mode 100644 index 0000000..6efe36c --- /dev/null +++ b/ast/gen/debug.go @@ -0,0 +1,55 @@ +package main + +type debugVisitor struct{} + +func debug(node node) string { + d := new(debugVisitor) + v, _ := node.accept(d) + return v +} + +// visitASTDefinitionsNode implements visitor. +func (d *debugVisitor) visitASTDefinitionsNode(a *astDefinitionsNode) (string, error) { + v := "#" + a.name + "\n\n" + + for _, defNode := range a.definitions { + def, _ := defNode.accept(d) + v += def + "\n\n" + } + + return v, nil +} + +// visitDefinition implements visitor. +func (d *debugVisitor) visitDefinition(def *definitionNode) (string, error) { + id, _ := def.identifier.accept(d) + v := id + " [\n" + + for _, fNode := range def.fields { + field, _ := fNode.accept(d) + v += "\t" + field + "\n" + } + + v += "]\n\n" + + return v, nil +} + +// visitField implements visitor. 
+func (d *debugVisitor) visitField(g *fieldNode) (string, error) { + left, _ := g.left.accept(d) + right, _ := g.right.accept(d) + return left + " = " + right + ";", nil +} + +// visitIdentifier implements visitor. +func (d *debugVisitor) visitIdentifier(i *identifierNode) (string, error) { + return i.value, nil +} + +// visitName implements visitor. +func (d *debugVisitor) visitName(n *nameNode) (string, error) { + return n.value, nil +} + +var _ visitor = new(debugVisitor) diff --git a/ast/gen/generate.go b/ast/gen/generate.go new file mode 100644 index 0000000..804109d --- /dev/null +++ b/ast/gen/generate.go @@ -0,0 +1,52 @@ +package main + +import ( + "flag" + "fmt" + "os" + "strings" +) + +var fileName string + +func init() { + flag.StringVar(&fileName, "f", "", "ast file to generate from") +} + +func main() { + flag.Parse() + + bs, err := os.ReadFile(fileName) + + tokens, errs := lex(string(bs)) + if len(errs) > 0 { + for _, err := range errs { + fmt.Printf("%s\n", err.Error()) + } + os.Exit(1) + } + + ast, err := parse(tokens) + if err != nil { + fmt.Printf("%s\n", err.Error()) + os.Exit(1) + } + + name, output, err := transpile(ast) + if err != nil { + fmt.Printf("%s\n", err.Error()) + os.Exit(1) + } + + f, err := os.Create(strings.ToLower(name) + ".go") + if err != nil { + fmt.Printf("%s\n", err.Error()) + os.Exit(1) + } + + _, err = f.Write(output) + if err != nil { + fmt.Printf("%s\n", err.Error()) + os.Exit(1) + } +} diff --git a/ast/gen/lex.go b/ast/gen/lex.go new file mode 100644 index 0000000..d84d9e1 --- /dev/null +++ b/ast/gen/lex.go @@ -0,0 +1,180 @@ +package main + +import ( + "errors" + "fmt" +) + +type tokenType int + +const ( + tokenTypeIdentifier tokenType = iota + tokenTypeRightBracket + tokenTypeLeftBracket + tokenTypeEqual + tokenTypeName + tokenTypeSemicolon + tokenTypeEOF +) + +type token struct { + Type tokenType + Lexeme string + Line int +} + +func isUpperAlpha(r rune) bool { + return r >= 'A' && r <= 'Z' +} + +func isAlpha(r rune) bool { + return isUpperAlpha(r) || (r >= 'a' && r <= 'z') +} + +func isNumeric(r rune) bool { + return r >= '0' && r <= '9' +} + +func isAlphaNumeric(r rune) bool { + return isAlpha(r) || isNumeric(r) +} + +func isGoIdentifier(r rune) bool { + return isAlphaNumeric(r) || r == '_' +} + +func isIdentifier(r rune) bool { + return isGoIdentifier(r) || r == '.' 
|| r == '*' +} + +type lexer struct { + source []rune + current int + start int + line int + tokens []*token +} + +func (l *lexer) addToken(t tokenType) { + l.tokens = append(l.tokens, &token{ + Type: t, + Lexeme: string(l.source[l.start:l.current]), + Line: l.line, + }) +} + +func (l *lexer) peek() rune { + if l.isAtEnd() { + return rune(0) + } + return l.source[l.current] +} + +func (l *lexer) peekNext() rune { + if l.current+1 >= len(l.source) { + return rune(0) + } + return l.source[l.current+1] +} + +func (l *lexer) advance() rune { + r := l.source[l.current] + l.current += 1 + return r +} + +func (l *lexer) isAtEnd() bool { + return l.current >= len(l.source) +} + +func (l *lexer) scanToken() error { + r := l.advance() + + switch r { + case '[': + l.addToken(tokenTypeLeftBracket) + case ']': + l.addToken(tokenTypeRightBracket) + case '=': + l.addToken(tokenTypeEqual) + case ';': + l.addToken(tokenTypeSemicolon) + case ' ', '\r', '\t': + break + case '#': + next := l.peek() + if isUpperAlpha(next) { + l.name() + return nil + } + return errors.New(fmt.Sprintf("names must have an uppercase alphabetical first character, found '%s'", string(next))) + case '\n': + l.line += 1 + default: + if isIdentifier(r) { + l.identifier() + return nil + } + + return errors.New(fmt.Sprintf("unexpected character '%s' at line %d", string(r), l.line)) + } + + return nil +} + +func (l *lexer) name() { + for isGoIdentifier(l.peek()) && l.peek() != '\n' { + l.advance() + } + + text := l.source[l.start+1 : l.current] + l.advance() + + l.tokens = append(l.tokens, &token{ + Type: tokenTypeName, + Lexeme: string(text), + Line: l.line, + }) +} + +func (l *lexer) identifier() { + for isIdentifier(l.peek()) { + l.advance() + } + + text := l.source[l.start:l.current] + + l.tokens = append(l.tokens, &token{ + Type: tokenTypeIdentifier, + Lexeme: string(text), + Line: l.line, + }) +} + +func (l *lexer) scanTokens() ([]*token, []error) { + errs := []error{} + for !l.isAtEnd() { + l.start = l.current + err := l.scanToken() + if err != nil { + errs = append(errs, err) + } + } + + l.addToken(tokenTypeEOF) + return l.tokens, errs +} + +func lex(source string) ([]*token, []error) { + l := new(lexer) + + l.source = []rune(source) + l.current = 0 + l.start = 0 + l.line = 1 + l.tokens = []*token{} + + tokens, errs := l.scanTokens() + + return tokens, errs +} diff --git a/ast/gen/parser.go b/ast/gen/parser.go new file mode 100644 index 0000000..c840e00 --- /dev/null +++ b/ast/gen/parser.go @@ -0,0 +1,229 @@ +package main + +import ( + "errors" + "fmt" +) + +type visitor interface { + visitASTDefinitionsNode(a *astDefinitionsNode) (string, error) + visitName(n *nameNode) (string, error) + visitIdentifier(i *identifierNode) (string, error) + visitField(g *fieldNode) (string, error) + visitDefinition(d *definitionNode) (string, error) +} + +type node interface { + accept(v visitor) (string, error) +} + +type astDefinitionsNode struct { + name string + definitions []node +} + +// accept implements node. +func (a *astDefinitionsNode) accept(v visitor) (string, error) { + return v.visitASTDefinitionsNode(a) +} + +var _ node = new(astDefinitionsNode) + +type nameNode struct { + value string +} + +// accept implements node. +func (n *nameNode) accept(v visitor) (string, error) { + return v.visitName(n) +} + +var _ node = new(nameNode) + +type identifierNode struct { + value string +} + +// accept implements node. 
+func (i *identifierNode) accept(v visitor) (string, error) { + return v.visitIdentifier(i) +} + +var _ node = new(identifierNode) + +type fieldNode struct { + left node + right node +} + +// accept implements node. +func (f *fieldNode) accept(v visitor) (string, error) { + return v.visitField(f) +} + +var _ node = new(fieldNode) + +type definitionNode struct { + identifier node + fields []node +} + +// accept implements node. +func (d *definitionNode) accept(v visitor) (string, error) { + return v.visitDefinition(d) +} + +var _ node = new(definitionNode) + +type parser struct { + tokens []*token + current int +} + +func (p *parser) peek() *token { + return p.tokens[p.current] +} + +func (p *parser) isAtEnd() bool { + return p.peek().Type == tokenTypeEOF +} + +func (p *parser) check(t tokenType) bool { + if p.isAtEnd() { + return false + } + + return p.peek().Type == t +} + +func (p *parser) match(types ...tokenType) bool { + for _, t := range types { + if p.check(t) { + p.advance() + return true + } + } + + return false +} + +func (p *parser) advance() *token { + t := p.tokens[p.current] + p.current += 1 + return t +} + +func (p *parser) consume(t tokenType, msg string) (*token, error) { + if p.check(t) { + return p.advance(), nil + } + return nil, errors.New(msg) +} + +func (p *parser) previous() *token { + return p.tokens[p.current-1] +} + +func (p *parser) astDefinitions() (node, error) { + if p.match(tokenTypeName) { + name := p.previous() + defs := []node{} + + for !p.isAtEnd() { + def, err := p.definition() + if err != nil { + return nil, err + } + defs = append(defs, def) + } + + return &astDefinitionsNode{ + name: name.Lexeme, + definitions: defs, + }, nil + } + + return nil, errors.New("expected name definition at start of file") +} + +// definition -> identifier "[" field+ "]" +func (p *parser) definition() (node, error) { + id, err := p.identifier() + if err != nil { + return nil, err + } + + if p.match(tokenTypeLeftBracket) { + fields := []node{} + + for !p.check(tokenTypeRightBracket) && !p.isAtEnd() { + f, err := p.field() + + if err != nil { + return nil, err + } + + fields = append(fields, f) + } + + if p.isAtEnd() { + return nil, errors.New(fmt.Sprintf("expected ']' after field definitions in '%s', got EOF", debug(id))) + } + + _, err := p.consume(tokenTypeRightBracket, fmt.Sprintf("expected ']' after field definitions in '%s', got EOF", debug(id))) + if err != nil { + return nil, err + } + + return &definitionNode{ + identifier: id, + fields: fields, + }, nil + } + + return nil, errors.New(fmt.Sprintf("expected '[' after identifier '%s'", debug(id))) +} + +// field -> identifier "=" identifier ";" +func (p *parser) field() (node, error) { + left, err := p.identifier() + if err != nil { + return nil, err + } + + if p.match(tokenTypeEqual) { + right, err := p.identifier() + if err != nil { + return nil, err + } + + if p.match(tokenTypeSemicolon) { + return &fieldNode{ + left: left, + right: right, + }, nil + } + + return nil, errors.New(fmt.Sprintf("expected ';' at end of field '%s'", debug(left))) + } + + return nil, errors.New(fmt.Sprintf("expected '=' after identifier '%s'", debug(left))) +} + +func (p *parser) identifier() (node, error) { + if p.match(tokenTypeIdentifier) { + return &identifierNode{ + value: p.previous().Lexeme, + }, nil + } + + return nil, errors.New("expected identifier") +} + +func parse(tokens []*token) (node, error) { + p := new(parser) + p.tokens = tokens + p.current = 0 + + return p.astDefinitions() +} diff --git a/ast/gen/transpile.go 
b/ast/gen/transpile.go new file mode 100644 index 0000000..56354ba --- /dev/null +++ b/ast/gen/transpile.go @@ -0,0 +1,146 @@ +package main + +import ( + "bytes" + "fmt" + "go/format" + "text/template" +) + +type defTmplData struct { + DefName string + Name string + Fields []string +} + +type transpiler struct { + Name string + DefinitionNames []string + Definitions []string + astDefsTmpl *template.Template + defTmpl *template.Template +} + +var astDefinitionsTemplate = ` +{{ $name := .Name }} +package ast + +// THIS FILE WAS AUTOMATICALLY GENERATED, DO NOT MANUALLY EDIT + +import "git.red-panda.pet/pandaware/lox-go/lexer" + +type {{ .Name }}Visitor interface { + {{ range .DefinitionNames }}Visit{{ . }}{{ $name }}(v *{{ . }}{{ $name }}) any{{ "\n" }}{{ end }} +} + +type {{ .Name }} interface { + Accept(v {{ .Name }}Visitor) any +} + +{{ range .Definitions }} +{{ . }} +{{ end }}` + +var definitionTemplate = ` +type {{ .DefName }}{{ .Name }} struct { + {{ range .Fields }}{{ . }}{{ "\n" }}{{ end }} +} + +func (n *{{ .DefName }}{{ .Name }}) Accept(v {{ .Name }}Visitor) any { + return v.Visit{{ .DefName }}{{ .Name }}(n) +} + +var _ {{ .Name }} = new({{ .DefName }}{{ .Name }})` + +// visitASTDefinitionsNode implements visitor. +func (t *transpiler) visitASTDefinitionsNode(a *astDefinitionsNode) (string, error) { + t.Name = a.name + + for _, defNode := range a.definitions { + def, err := defNode.accept(t) + if err != nil { + return "", err + } + t.Definitions = append(t.Definitions, def) + } + + buf := &bytes.Buffer{} + err := t.astDefsTmpl.Execute(buf, t) + return buf.String(), err +} + +// visitDefinition implements visitor. +func (t *transpiler) visitDefinition(d *definitionNode) (string, error) { + name, err := d.identifier.accept(t) + if err != nil { + return "", err + } + + t.DefinitionNames = append(t.DefinitionNames, name) + + fields := make([]string, len(d.fields)) + + for _, fieldNode := range d.fields { + field, err := fieldNode.accept(t) + if err != nil { + return "", err + } + fields = append(fields, field) + } + + buf := &bytes.Buffer{} + + err = t.defTmpl.Execute(buf, defTmplData{ + DefName: name, + Name: t.Name, + Fields: fields, + }) + + return buf.String(), err +} + +// visitField implements visitor. +func (t *transpiler) visitField(g *fieldNode) (string, error) { + left, err := g.left.accept(t) + if err != nil { + return "", err + } + + right, err := g.right.accept(t) + if err != nil { + return "", err + } + + return fmt.Sprintf("\t%s\t%s\n", left, right), nil +} + +// visitIdentifier implements visitor. +func (t *transpiler) visitIdentifier(i *identifierNode) (string, error) { + return i.value, nil +} + +// visitName implements visitor. +func (t *transpiler) visitName(n *nameNode) (string, error) { + return n.value, nil +} + +var _ visitor = new(transpiler) + +func transpile(n node) (string, []byte, error) { + var err error + t := new(transpiler) + + t.Definitions = []string{} + t.DefinitionNames = []string{} + + t.astDefsTmpl, err = template.New("").Parse(astDefinitionsTemplate) + t.defTmpl, err = template.New("").Parse(definitionTemplate) + + str, err := n.accept(t) + if err != nil { + return "", nil, err + } + + bs, err := format.Source([]byte(str)) + return t.Name, bs, err +} diff --git a/lexer/tokentype_string.go b/lexer/tokentype_string.go new file mode 100644 index 0000000..14fe8af --- /dev/null +++ b/lexer/tokentype_string.go @@ -0,0 +1,61 @@ +// Code generated by "stringer -type TokenType -linecomment -trimprefix TokenType"; DO NOT EDIT. 
+ +package lexer + +import "strconv" + +func _() { + // An "invalid array index" compiler error signifies that the constant values have changed. + // Re-run the stringer command to generate them again. + var x [1]struct{} + _ = x[TokenTypeLeftParen-0] + _ = x[TokenTypeRightParen-1] + _ = x[TokenTypeLeftBrace-2] + _ = x[TokenTypeRightBrace-3] + _ = x[TokenTypeComma-4] + _ = x[TokenTypeDot-5] + _ = x[TokenTypeMinus-6] + _ = x[TokenTypePlus-7] + _ = x[TokenTypeSemicolon-8] + _ = x[TokenTypeSlash-9] + _ = x[TokenTypeStar-10] + _ = x[TokenTypeBang-11] + _ = x[TokenTypeBangEq-12] + _ = x[TokenTypeEqual-13] + _ = x[TokenTypeEqualEqual-14] + _ = x[TokenTypeGreater-15] + _ = x[TokenTypeGreaterEq-16] + _ = x[TokenTypeLess-17] + _ = x[TokenTypeLessEq-18] + _ = x[TokenTypeIdentifier-19] + _ = x[TokenTypeString-20] + _ = x[TokenTypeNumber-21] + _ = x[TokenTypeAnd-22] + _ = x[TokenTypeClass-23] + _ = x[TokenTypeElse-24] + _ = x[TokenTypeFalse-25] + _ = x[TokenTypeFun-26] + _ = x[TokenTypeFor-27] + _ = x[TokenTypeIf-28] + _ = x[TokenTypeNil-29] + _ = x[TokenTypeOr-30] + _ = x[TokenTypePrint-31] + _ = x[TokenTypeReturn-32] + _ = x[TokenTypeSuper-33] + _ = x[TokenTypeThis-34] + _ = x[TokenTypeTrue-35] + _ = x[TokenTypeVar-36] + _ = x[TokenTypeWhile-37] + _ = x[TokenTypeEOF-38] +} + +const _TokenType_name = "LeftParenRightParenLeftBraceRightBraceCommaDotMinusPlusSemicolonSlashStarBangBangEqEqualEqualEqualGreaterGreaterEqLessLessEqIdentifierStringNumberAndClassElseFalseFunForIfNilOrPrintReturnSuperThisTrueVarWhileEOF" + +var _TokenType_index = [...]uint8{0, 9, 19, 28, 38, 43, 46, 51, 55, 64, 69, 73, 77, 83, 88, 98, 105, 114, 118, 124, 134, 140, 146, 149, 154, 158, 163, 166, 169, 171, 174, 176, 181, 187, 192, 196, 200, 203, 208, 211} + +func (i TokenType) String() string { + if i < 0 || i >= TokenType(len(_TokenType_index)-1) { + return "TokenType(" + strconv.FormatInt(int64(i), 10) + ")" + } + return _TokenType_name[_TokenType_index[i]:_TokenType_index[i+1]] +} diff --git a/parser/parser.go b/parser/parser.go index 74bc7ff..93d0e6c 100644 --- a/parser/parser.go +++ b/parser/parser.go @@ -67,7 +67,7 @@ func (p *Parser) parseLeftAssocBinOps(operand operandFunc, tokenTypes ...lexer.T if err != nil { return e, err } - e = &ast.BinaryExpr{Left: e, Operator: op, Right: r} + e = &ast.BinaryExpr{Left: e, Op: op, Right: r} } return e, nil @@ -111,7 +111,7 @@ func (p *Parser) unary() (ast.Expr, error) { if p.match(lexer.TokenTypeBang, lexer.TokenTypeMinus) { op := p.previous() r, err := p.unary() - return &ast.UnaryExpr{Operator: op, Right: r}, err + return &ast.UnaryExpr{Op: op, Right: r}, err } return p.primary() diff --git a/repl/main.go b/repl/main.go index 5b82a8c..98b83a5 100644 --- a/repl/main.go +++ b/repl/main.go @@ -11,6 +11,7 @@ type REPL struct { input textinput.Model history []string + output []string historySize uint } @@ -24,6 +25,7 @@ func NewREPL(historySize uint) REPL { return REPL{ history: make([]string, historySize), + output: make([]string, historySize), historySize: historySize, input: inputModel, } diff --git a/run.go b/run.go index dfbf639..91ff4aa 100644 --- a/run.go +++ b/run.go @@ -2,7 +2,9 @@ package loxgo import ( "errors" + "fmt" "os" + "time" "git.red-panda.pet/pandaware/lox-go/lexer" "git.red-panda.pet/pandaware/lox-go/parser" @@ -25,19 +27,35 @@ func RunFile(filename string) error { func Run(source string) error { s := lexer.New(source) + + lexStart := time.Now() tokens, ok := s.ScanTokens() + lexDuration := time.Since(lexStart) + if !ok { return errors.New("lexer error") } p := 
parser.New(tokens) + + parseStart := time.Now() expr, err := p.Parse() + parseDuration := time.Since(parseStart) + if err != nil { return errors.New("parser error") } interpreter := runtime.NewInterpreter() - reporter.Debug(0, "interpreter", "stdin", interpreter.Evaluate(expr).Debug()) + + evalStart := time.Now() + reporter.Info(1, "interpreter", "repl", interpreter.Evaluate(expr).Debug()) + evalDuration := time.Since(evalStart) + + fmt.Println("") + reporter.Debug(-1, "lexer", "repl", fmt.Sprintf("took %s", lexDuration)) + reporter.Debug(-1, "parser", "repl", fmt.Sprintf("took %s", parseDuration)) + reporter.Debug(-1, "interp", "repl", fmt.Sprintf("took %s", evalDuration)) return nil } diff --git a/runtime/interpreter.go b/runtime/interpreter.go index 44a11e0..1b28d15 100644 --- a/runtime/interpreter.go +++ b/runtime/interpreter.go @@ -36,7 +36,7 @@ func (i *Interpreter) VisitBinaryExpr(b *ast.BinaryExpr) any { areStrings := leftType == LoxTypeString && rightType == LoxTypeString - switch b.Operator.Type { + switch b.Op.Type { case lexer.TokenTypeMinus: if !areNumbers { // todo: error here @@ -119,7 +119,7 @@ func (i *Interpreter) VisitLiteralExpr(g *ast.LiteralExpr) any { func (i *Interpreter) VisitUnaryExpr(g *ast.UnaryExpr) any { right := i.Evaluate(g.Right) - switch g.Operator.Type { + switch g.Op.Type { case lexer.TokenTypeMinus: if right.Type() != LoxTypeNumber { // todo: error here diff --git a/runtime/loxtype_string.go b/runtime/loxtype_string.go new file mode 100644 index 0000000..526656b --- /dev/null +++ b/runtime/loxtype_string.go @@ -0,0 +1,27 @@ +// Code generated by "stringer -type LoxType -trimprefix LoxType"; DO NOT EDIT. + +package runtime + +import "strconv" + +func _() { + // An "invalid array index" compiler error signifies that the constant values have changed. + // Re-run the stringer command to generate them again. + var x [1]struct{} + _ = x[LoxTypeString-0] + _ = x[LoxTypeNumber-1] + _ = x[LoxTypeBoolean-2] + _ = x[LoxTypeNil-3] + _ = x[LoxTypeUndefined-4] +} + +const _LoxType_name = "StringNumberBooleanNilUndefined" + +var _LoxType_index = [...]uint8{0, 6, 12, 19, 22, 31} + +func (i LoxType) String() string { + if i < 0 || i >= LoxType(len(_LoxType_index)-1) { + return "LoxType(" + strconv.FormatInt(int64(i), 10) + ")" + } + return _LoxType_name[_LoxType_index[i]:_LoxType_index[i+1]] +}
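
Usage note: the .ast DSL above keeps the visitor boilerplate in one place. A
definitions file starts with a "#Name" line naming the node interface,
followed by one block per node of the form "Definition [ Field = Type; ... ]".
The //go:generate directive added to ast/ast.go ("go run ./gen -f expr.ast")
means "go generate ./ast" rewrites ast/expr.go from ast/expr.ast.

As a sketch only (the Ternary node here is hypothetical and not part of this
change), extending the grammar would mean appending a block like this to
ast/expr.ast and re-running go generate:

    Ternary [
        Cond = Expr;
        Then = Expr;
        Else = Expr;
    ]

Regeneration would emit a TernaryExpr struct with those fields, its Accept
method, a VisitTernaryExpr method on ExprVisitor, and a "var _ Expr =
new(TernaryExpr)" assertion. Existing visitors (the AST printer and the
interpreter) would then fail to compile until they implement
VisitTernaryExpr, which is what the "var _ ExprVisitor = new(Printer)" style
checks are there to catch.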