restructure
This commit is contained in:
parent
edcbfde351
commit
eebaadc16e
21 changed files with 937 additions and 626 deletions
8
README.md
Normal file
8
README.md
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
# lox-go
|
||||
|
||||
a (currently incomplete) implementation of lox in golang
|
||||
|
||||
|
||||
## todo
|
||||
- [ ] finish implementation
|
||||
- [ ] make better repl (see: [repl](./repl))
|
||||
53
ast.go
53
ast.go
|
|
@ -1,53 +0,0 @@
|
|||
package main
|
||||
|
||||
// todo: find something better than any here
|
||||
// we can't use generics on either the visitor itself or
|
||||
// each individual method because the adding it to the
|
||||
// visitor itself infects every expr you use with it
|
||||
// and methods cannot have generic parameters
|
||||
|
||||
type exprVisitor interface {
|
||||
visitBinaryExpr(b *binaryExpr) any
|
||||
visitGroupingExpr(g *groupingExpr) any
|
||||
visitLiteralExpr(g *literalExpr) any
|
||||
visitUnaryExpr(g *unaryExpr) any
|
||||
}
|
||||
|
||||
type expr interface {
|
||||
accept(v exprVisitor) any
|
||||
}
|
||||
|
||||
type binaryExpr struct {
|
||||
Left expr
|
||||
Operator *token
|
||||
Right expr
|
||||
}
|
||||
|
||||
func (b *binaryExpr) accept(v exprVisitor) any {
|
||||
return v.visitBinaryExpr(b)
|
||||
}
|
||||
|
||||
type groupingExpr struct {
|
||||
Expr expr
|
||||
}
|
||||
|
||||
func (g *groupingExpr) accept(v exprVisitor) any {
|
||||
return v.visitGroupingExpr(g)
|
||||
}
|
||||
|
||||
type literalExpr struct {
|
||||
Value any
|
||||
}
|
||||
|
||||
func (l *literalExpr) accept(v exprVisitor) any {
|
||||
return v.visitLiteralExpr(l)
|
||||
}
|
||||
|
||||
type unaryExpr struct {
|
||||
Operator *token
|
||||
Right expr
|
||||
}
|
||||
|
||||
func (u *unaryExpr) accept(v exprVisitor) any {
|
||||
return v.visitUnaryExpr(u)
|
||||
}
|
||||
55
ast/ast.go
Normal file
55
ast/ast.go
Normal file
|
|
@ -0,0 +1,55 @@
|
|||
package ast
|
||||
|
||||
import "git.red-panda.pet/pandaware/lox-go/lexer"
|
||||
|
||||
// todo: find something better than any here
|
||||
// we can't use generics on either the visitor itself or
|
||||
// each individual method because the adding it to the
|
||||
// visitor itself infects every expr you use with it
|
||||
// and methods cannot have generic parameters
|
||||
|
||||
type ExprVisitor interface {
|
||||
VisitBinaryExpr(b *BinaryExpr) any
|
||||
VisitGroupingExpr(g *GroupingExpr) any
|
||||
VisitLiteralExpr(g *LiteralExpr) any
|
||||
VisitUnaryExpr(g *UnaryExpr) any
|
||||
}
|
||||
|
||||
type Expr interface {
|
||||
accept(v ExprVisitor) any
|
||||
}
|
||||
|
||||
type BinaryExpr struct {
|
||||
Left Expr
|
||||
Operator *lexer.Token
|
||||
Right Expr
|
||||
}
|
||||
|
||||
func (b *BinaryExpr) accept(v ExprVisitor) any {
|
||||
return v.VisitBinaryExpr(b)
|
||||
}
|
||||
|
||||
type GroupingExpr struct {
|
||||
Expr Expr
|
||||
}
|
||||
|
||||
func (g *GroupingExpr) accept(v ExprVisitor) any {
|
||||
return v.VisitGroupingExpr(g)
|
||||
}
|
||||
|
||||
type LiteralExpr struct {
|
||||
Value any
|
||||
}
|
||||
|
||||
func (l *LiteralExpr) accept(v ExprVisitor) any {
|
||||
return v.VisitLiteralExpr(l)
|
||||
}
|
||||
|
||||
type UnaryExpr struct {
|
||||
Operator *lexer.Token
|
||||
Right Expr
|
||||
}
|
||||
|
||||
func (u *UnaryExpr) accept(v ExprVisitor) any {
|
||||
return v.VisitUnaryExpr(u)
|
||||
}
|
||||
66
ast/astprinter.go
Normal file
66
ast/astprinter.go
Normal file
|
|
@ -0,0 +1,66 @@
|
|||
package ast
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type Printer struct {
|
||||
result string
|
||||
}
|
||||
|
||||
var _ ExprVisitor = new(Printer)
|
||||
|
||||
func (p *Printer) VisitBinaryExpr(b *BinaryExpr) any {
|
||||
return p.Parenthesize(b.Operator.Lexeme, b.Left, b.Right)
|
||||
}
|
||||
|
||||
func (p *Printer) VisitGroupingExpr(g *GroupingExpr) any {
|
||||
return p.Parenthesize("group", g.Expr)
|
||||
}
|
||||
|
||||
func (p *Printer) VisitLiteralExpr(g *LiteralExpr) any {
|
||||
switch t := g.Value.(type) {
|
||||
case string:
|
||||
return t
|
||||
case bool:
|
||||
if t {
|
||||
return "true"
|
||||
}
|
||||
return "false"
|
||||
case float64:
|
||||
return strconv.FormatFloat(t, 'f', 3, 64)
|
||||
case nil:
|
||||
return "nil"
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%v", g.Value)
|
||||
}
|
||||
|
||||
func (p *Printer) VisitUnaryExpr(g *UnaryExpr) any {
|
||||
return p.Parenthesize(g.Operator.Lexeme, g.Right)
|
||||
}
|
||||
|
||||
func (p *Printer) Parenthesize(name string, expressions ...Expr) string {
|
||||
val := "(" + name
|
||||
|
||||
for _, e := range expressions {
|
||||
exprStr, ok := (e.accept(p)).(string)
|
||||
if !ok {
|
||||
panic("badly implemented visitor")
|
||||
}
|
||||
val += " " + exprStr
|
||||
}
|
||||
|
||||
val += ")"
|
||||
return val
|
||||
|
||||
}
|
||||
|
||||
func (p *Printer) Print(e Expr) string {
|
||||
str, ok := (e.accept(p)).(string)
|
||||
if !ok {
|
||||
panic("badly implemented visitor")
|
||||
}
|
||||
return str
|
||||
}
|
||||
|
|
@ -1,68 +0,0 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type astPrinter struct {
|
||||
result string
|
||||
}
|
||||
|
||||
// visitBinaryExpr implements exprVisitor.
|
||||
func (p *astPrinter) visitBinaryExpr(b *binaryExpr) any {
|
||||
return p.parenthesize(b.Operator.Lexeme, b.Left, b.Right)
|
||||
}
|
||||
|
||||
// visitGroupingExpr implements exprVisitor.
|
||||
func (p *astPrinter) visitGroupingExpr(g *groupingExpr) any {
|
||||
return p.parenthesize("group", g.Expr)
|
||||
}
|
||||
|
||||
// visitLiteralExpr implements exprVisitor.
|
||||
func (p *astPrinter) visitLiteralExpr(g *literalExpr) any {
|
||||
switch t := g.Value.(type) {
|
||||
case string:
|
||||
return t
|
||||
case bool:
|
||||
if t {
|
||||
return "true"
|
||||
}
|
||||
return "false"
|
||||
case float64:
|
||||
return strconv.FormatFloat(t, 'f', 3, 64)
|
||||
case nil:
|
||||
return "nil"
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%v", g.Value)
|
||||
}
|
||||
|
||||
// visitUnaryExpr implements exprVisitor.
|
||||
func (p *astPrinter) visitUnaryExpr(g *unaryExpr) any {
|
||||
return p.parenthesize(g.Operator.Lexeme, g.Right)
|
||||
}
|
||||
|
||||
func (p *astPrinter) parenthesize(name string, expressions ...expr) string {
|
||||
val := "(" + name
|
||||
|
||||
for _, e := range expressions {
|
||||
exprStr, ok := (e.accept(p)).(string)
|
||||
if !ok {
|
||||
panic("badly implemented visitor")
|
||||
}
|
||||
val += " " + exprStr
|
||||
}
|
||||
|
||||
val += ")"
|
||||
return val
|
||||
|
||||
}
|
||||
|
||||
func (p *astPrinter) print(e expr) string {
|
||||
str, ok := (e.accept(p)).(string)
|
||||
if !ok {
|
||||
panic("badly implemented visitor")
|
||||
}
|
||||
return str
|
||||
}
|
||||
31
cli/main.go
Normal file
31
cli/main.go
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
loxgo "git.red-panda.pet/pandaware/lox-go"
|
||||
"git.red-panda.pet/pandaware/lox-go/reporter"
|
||||
)
|
||||
|
||||
var (
|
||||
file string
|
||||
)
|
||||
|
||||
func main() {
|
||||
reporter.SetLevel(reporter.LevelDebug)
|
||||
|
||||
s := bufio.NewScanner(os.Stdin)
|
||||
for {
|
||||
fmt.Printf("repl> ")
|
||||
s.Scan()
|
||||
|
||||
loxgo.Run(s.Text())
|
||||
if err := s.Err(); err != nil {
|
||||
reporter.Fatal(1, -1, "repl", "stdin", err.Error())
|
||||
}
|
||||
|
||||
fmt.Printf("\n")
|
||||
}
|
||||
}
|
||||
32
go.mod
32
go.mod
|
|
@ -1,3 +1,35 @@
|
|||
module git.red-panda.pet/pandaware/lox-go
|
||||
|
||||
go 1.23.2
|
||||
|
||||
require (
|
||||
github.com/charmbracelet/bubbles v0.21.0
|
||||
github.com/charmbracelet/bubbletea v1.3.5
|
||||
github.com/charmbracelet/lipgloss v1.1.0
|
||||
github.com/charmbracelet/log v0.4.2
|
||||
)
|
||||
|
||||
require (
|
||||
git.red-panda.pet/pandaware/lipgloss-catppuccin v0.0.0-20250608181442-a48744fcd663
|
||||
github.com/atotto/clipboard v0.1.4 // indirect
|
||||
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
|
||||
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect
|
||||
github.com/charmbracelet/x/ansi v0.8.0 // indirect
|
||||
github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd // indirect
|
||||
github.com/charmbracelet/x/term v0.2.1 // indirect
|
||||
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect
|
||||
github.com/go-logfmt/logfmt v0.6.0 // indirect
|
||||
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/mattn/go-localereader v0.0.1 // indirect
|
||||
github.com/mattn/go-runewidth v0.0.16 // indirect
|
||||
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect
|
||||
github.com/muesli/cancelreader v0.2.2 // indirect
|
||||
github.com/muesli/termenv v0.16.0 // indirect
|
||||
github.com/rivo/uniseg v0.4.7 // indirect
|
||||
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
|
||||
golang.org/x/exp v0.0.0-20231006140011-7918f672742d // indirect
|
||||
golang.org/x/sync v0.13.0 // indirect
|
||||
golang.org/x/sys v0.32.0 // indirect
|
||||
golang.org/x/text v0.3.8 // indirect
|
||||
)
|
||||
|
|
|
|||
63
go.sum
Normal file
63
go.sum
Normal file
|
|
@ -0,0 +1,63 @@
|
|||
git.red-panda.pet/pandaware/lipgloss-catppuccin v0.0.0-20250608181442-a48744fcd663 h1:jgxkCPBt+XdRvo1RPBcaGfF3X77hLg9SexLXZm2I2A0=
|
||||
git.red-panda.pet/pandaware/lipgloss-catppuccin v0.0.0-20250608181442-a48744fcd663/go.mod h1:OsoRM6jK2N0aX3A3rDDGq28abo2b65uaIyb6ivtxoDI=
|
||||
github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z4=
|
||||
github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI=
|
||||
github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
|
||||
github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
|
||||
github.com/charmbracelet/bubbles v0.21.0 h1:9TdC97SdRVg/1aaXNVWfFH3nnLAwOXr8Fn6u6mfQdFs=
|
||||
github.com/charmbracelet/bubbles v0.21.0/go.mod h1:HF+v6QUR4HkEpz62dx7ym2xc71/KBHg+zKwJtMw+qtg=
|
||||
github.com/charmbracelet/bubbletea v1.3.5 h1:JAMNLTbqMOhSwoELIr0qyP4VidFq72/6E9j7HHmRKQc=
|
||||
github.com/charmbracelet/bubbletea v1.3.5/go.mod h1:TkCnmH+aBd4LrXhXcqrKiYwRs7qyQx5rBgH5fVY3v54=
|
||||
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc h1:4pZI35227imm7yK2bGPcfpFEmuY1gc2YSTShr4iJBfs=
|
||||
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc/go.mod h1:X4/0JoqgTIPSFcRA/P6INZzIuyqdFY5rm8tb41s9okk=
|
||||
github.com/charmbracelet/lipgloss v1.1.0 h1:vYXsiLHVkK7fp74RkV7b2kq9+zDLoEU4MZoFqR/noCY=
|
||||
github.com/charmbracelet/lipgloss v1.1.0/go.mod h1:/6Q8FR2o+kj8rz4Dq0zQc3vYf7X+B0binUUBwA0aL30=
|
||||
github.com/charmbracelet/log v0.4.2 h1:hYt8Qj6a8yLnvR+h7MwsJv/XvmBJXiueUcI3cIxsyig=
|
||||
github.com/charmbracelet/log v0.4.2/go.mod h1:qifHGX/tc7eluv2R6pWIpyHDDrrb/AG71Pf2ysQu5nw=
|
||||
github.com/charmbracelet/x/ansi v0.8.0 h1:9GTq3xq9caJW8ZrBTe0LIe2fvfLR/bYXKTx2llXn7xE=
|
||||
github.com/charmbracelet/x/ansi v0.8.0/go.mod h1:wdYl/ONOLHLIVmQaxbIYEC/cRKOQyjTkowiI4blgS9Q=
|
||||
github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd h1:vy0GVL4jeHEwG5YOXDmi86oYw2yuYUGqz6a8sLwg0X8=
|
||||
github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd/go.mod h1:xe0nKWGd3eJgtqZRaN9RjMtK7xUYchjzPr7q6kcvCCs=
|
||||
github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
|
||||
github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4=
|
||||
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM=
|
||||
github.com/go-logfmt/logfmt v0.6.0 h1:wGYYu3uicYdqXVgoYbvnkrPVXkuLM1p1ifugDMEdRi4=
|
||||
github.com/go-logfmt/logfmt v0.6.0/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs=
|
||||
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
|
||||
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mattn/go-localereader v0.0.1 h1:ygSAOl7ZXTx4RdPYinUpg6W99U8jWvWi9Ye2JC/oIi4=
|
||||
github.com/mattn/go-localereader v0.0.1/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88=
|
||||
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
|
||||
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
|
||||
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 h1:ZK8zHtRHOkbHy6Mmr5D264iyp3TiX5OmNcI5cIARiQI=
|
||||
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo=
|
||||
github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA=
|
||||
github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo=
|
||||
github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc=
|
||||
github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
||||
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
||||
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
|
||||
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM=
|
||||
golang.org/x/exp v0.0.0-20231006140011-7918f672742d h1:jtJma62tbqLibJ5sFQz8bKtEM8rJBtfilJ2qTU199MI=
|
||||
golang.org/x/exp v0.0.0-20231006140011-7918f672742d/go.mod h1:ldy0pHrwJyGW56pPQzzkH36rKxoZW1tw7ZJpeKx+hdo=
|
||||
golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610=
|
||||
golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
||||
golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20=
|
||||
golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/text v0.3.8 h1:nAL+RVCQ9uMn3vJZbV+MRnydTJFPf8qqY42YiA6MrqY=
|
||||
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
|
|
@ -1,27 +1,29 @@
|
|||
package main
|
||||
package lexer
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
|
||||
"git.red-panda.pet/pandaware/lox-go/reporter"
|
||||
)
|
||||
|
||||
var keywords = map[string]tokenType{
|
||||
"and": tokenTypeAnd,
|
||||
"class": tokenTypeClass,
|
||||
"else": tokenTypeElse,
|
||||
"false": tokenTypeFalse,
|
||||
"fun": tokenTypeFun,
|
||||
"for": tokenTypeFor,
|
||||
"if": tokenTypeIf,
|
||||
"nil": tokenTypeNil,
|
||||
"or": tokenTypeOr,
|
||||
"print": tokenTypePrint,
|
||||
"return": tokenTypeReturn,
|
||||
"super": tokenTypeSuper,
|
||||
"this": tokenTypeThis,
|
||||
"true": tokenTypeTrue,
|
||||
"var": tokenTypeVar,
|
||||
"while": tokenTypeWhile,
|
||||
var keywords = map[string]TokenType{
|
||||
"and": TokenTypeAnd,
|
||||
"class": TokenTypeClass,
|
||||
"else": TokenTypeElse,
|
||||
"false": TokenTypeFalse,
|
||||
"fun": TokenTypeFun,
|
||||
"for": TokenTypeFor,
|
||||
"if": TokenTypeIf,
|
||||
"nil": TokenTypeNil,
|
||||
"or": TokenTypeOr,
|
||||
"print": TokenTypePrint,
|
||||
"return": TokenTypeReturn,
|
||||
"super": TokenTypeSuper,
|
||||
"this": TokenTypeThis,
|
||||
"true": TokenTypeTrue,
|
||||
"var": TokenTypeVar,
|
||||
"while": TokenTypeWhile,
|
||||
}
|
||||
|
||||
func isDigit(r rune) bool {
|
||||
|
|
@ -38,20 +40,20 @@ func isAlphaNumeric(r rune) bool {
|
|||
return isDigit(r) || isAlpha(r)
|
||||
}
|
||||
|
||||
type scanner struct {
|
||||
type Scanner struct {
|
||||
source []rune
|
||||
tokens []*token
|
||||
tokens []*Token
|
||||
|
||||
start int
|
||||
current int
|
||||
line int
|
||||
}
|
||||
|
||||
func newScanner(source string) *scanner {
|
||||
s := new(scanner)
|
||||
func New(source string) *Scanner {
|
||||
s := new(Scanner)
|
||||
|
||||
s.source = []rune(source)
|
||||
s.tokens = []*token{}
|
||||
s.tokens = []*Token{}
|
||||
|
||||
s.start = 0
|
||||
s.current = 0
|
||||
|
|
@ -60,18 +62,18 @@ func newScanner(source string) *scanner {
|
|||
return s
|
||||
}
|
||||
|
||||
func (s *scanner) isAtEnd() bool {
|
||||
func (s *Scanner) isAtEnd() bool {
|
||||
return s.current >= len(s.source)
|
||||
}
|
||||
|
||||
func (s *scanner) advance() rune {
|
||||
func (s *Scanner) advance() rune {
|
||||
r := s.source[s.current]
|
||||
s.current += 1
|
||||
return r
|
||||
}
|
||||
|
||||
func (s *scanner) addToken(t tokenType, literal any) {
|
||||
s.tokens = append(s.tokens, &token{
|
||||
func (s *Scanner) addToken(t TokenType, literal any) {
|
||||
s.tokens = append(s.tokens, &Token{
|
||||
Type: t,
|
||||
Lexeme: string(s.source[s.start:s.current]),
|
||||
Literal: literal,
|
||||
|
|
@ -79,7 +81,7 @@ func (s *scanner) addToken(t tokenType, literal any) {
|
|||
})
|
||||
}
|
||||
|
||||
func (s *scanner) match(expected rune) bool {
|
||||
func (s *Scanner) match(expected rune) bool {
|
||||
if s.isAtEnd() {
|
||||
return false
|
||||
}
|
||||
|
|
@ -92,70 +94,70 @@ func (s *scanner) match(expected rune) bool {
|
|||
return true
|
||||
}
|
||||
|
||||
func (s *scanner) peek() rune {
|
||||
func (s *Scanner) peek() rune {
|
||||
if s.isAtEnd() {
|
||||
return rune(0)
|
||||
}
|
||||
return s.source[s.current]
|
||||
}
|
||||
|
||||
func (s *scanner) peekNext() rune {
|
||||
func (s *Scanner) peekNext() rune {
|
||||
if s.current+1 > len(s.source) {
|
||||
return rune(0)
|
||||
}
|
||||
return s.source[s.current+1]
|
||||
}
|
||||
|
||||
func (s *scanner) scanToken() bool {
|
||||
func (s *Scanner) scanToken() bool {
|
||||
r := s.advance()
|
||||
|
||||
switch r {
|
||||
// simple 1 character tokens
|
||||
case '(':
|
||||
s.addToken(tokenTypeLeftParen, nil)
|
||||
s.addToken(TokenTypeLeftParen, nil)
|
||||
case ')':
|
||||
s.addToken(tokenTypeRightParen, nil)
|
||||
s.addToken(TokenTypeRightParen, nil)
|
||||
case '{':
|
||||
s.addToken(tokenTypeLeftBrace, nil)
|
||||
s.addToken(TokenTypeLeftBrace, nil)
|
||||
case '}':
|
||||
s.addToken(tokenTypeRightBrace, nil)
|
||||
s.addToken(TokenTypeRightBrace, nil)
|
||||
case ',':
|
||||
s.addToken(tokenTypeComma, nil)
|
||||
s.addToken(TokenTypeComma, nil)
|
||||
case '.':
|
||||
s.addToken(tokenTypeDot, nil)
|
||||
s.addToken(TokenTypeDot, nil)
|
||||
case '-':
|
||||
s.addToken(tokenTypeMinus, nil)
|
||||
s.addToken(TokenTypeMinus, nil)
|
||||
case '+':
|
||||
s.addToken(tokenTypePlus, nil)
|
||||
s.addToken(TokenTypePlus, nil)
|
||||
case ';':
|
||||
s.addToken(tokenTypeSemicolon, nil)
|
||||
s.addToken(TokenTypeSemicolon, nil)
|
||||
case '*':
|
||||
s.addToken(tokenTypeStar, nil)
|
||||
s.addToken(TokenTypeStar, nil)
|
||||
|
||||
// simple 2 character tokens
|
||||
case '!':
|
||||
if s.match('=') {
|
||||
s.addToken(tokenTypeBangEq, nil)
|
||||
s.addToken(TokenTypeBangEq, nil)
|
||||
} else {
|
||||
s.addToken(tokenTypeBang, nil)
|
||||
s.addToken(TokenTypeBang, nil)
|
||||
}
|
||||
case '=':
|
||||
if s.match('=') {
|
||||
s.addToken(tokenTypeEqualEqual, nil)
|
||||
s.addToken(TokenTypeEqualEqual, nil)
|
||||
} else {
|
||||
s.addToken(tokenTypeEqual, nil)
|
||||
s.addToken(TokenTypeEqual, nil)
|
||||
}
|
||||
case '<':
|
||||
if s.match('=') {
|
||||
s.addToken(tokenTypeLessEq, nil)
|
||||
s.addToken(TokenTypeLessEq, nil)
|
||||
} else {
|
||||
s.addToken(tokenTypeLess, nil)
|
||||
s.addToken(TokenTypeLess, nil)
|
||||
}
|
||||
case '>':
|
||||
if s.match('=') {
|
||||
s.addToken(tokenTypeGreaterEq, nil)
|
||||
s.addToken(TokenTypeGreaterEq, nil)
|
||||
} else {
|
||||
s.addToken(tokenTypeGreater, nil)
|
||||
s.addToken(TokenTypeGreater, nil)
|
||||
}
|
||||
|
||||
case '/':
|
||||
|
|
@ -166,7 +168,7 @@ func (s *scanner) scanToken() bool {
|
|||
s.advance()
|
||||
}
|
||||
} else {
|
||||
s.addToken(tokenTypeSlash, nil)
|
||||
s.addToken(TokenTypeSlash, nil)
|
||||
}
|
||||
|
||||
// ignore whitespace
|
||||
|
|
@ -193,7 +195,7 @@ func (s *scanner) scanToken() bool {
|
|||
return false
|
||||
}
|
||||
|
||||
reportErr(s.line, fmt.Sprintf("Unexpected character %c", r))
|
||||
reporter.Err(s.line, fmt.Sprintf("Unexpected character %c", r))
|
||||
|
||||
return true
|
||||
}
|
||||
|
|
@ -201,7 +203,7 @@ func (s *scanner) scanToken() bool {
|
|||
return false
|
||||
}
|
||||
|
||||
func (s *scanner) string() bool {
|
||||
func (s *Scanner) string() bool {
|
||||
// peek until we hit the end of the string or file, whichever is first
|
||||
for s.peek() != '"' && !s.isAtEnd() {
|
||||
// support strings with new lines :D
|
||||
|
|
@ -214,7 +216,7 @@ func (s *scanner) string() bool {
|
|||
// if the token didn't end before the file we report and err
|
||||
// and return that we got one
|
||||
if s.isAtEnd() {
|
||||
reportErr(s.line, "Unterminated string")
|
||||
reporter.Err(s.line, "Unterminated string")
|
||||
return true
|
||||
}
|
||||
|
||||
|
|
@ -222,12 +224,12 @@ func (s *scanner) string() bool {
|
|||
|
||||
// todo: escape sequences
|
||||
value := s.source[s.start+1 : s.current-1]
|
||||
s.addToken(tokenTypeString, string(value))
|
||||
s.addToken(TokenTypeString, string(value))
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (s *scanner) number() bool {
|
||||
func (s *Scanner) number() bool {
|
||||
for isDigit(s.peek()) {
|
||||
s.advance()
|
||||
}
|
||||
|
|
@ -241,12 +243,12 @@ func (s *scanner) number() bool {
|
|||
}
|
||||
|
||||
literal, _ := strconv.ParseFloat(string(s.source[s.start:s.current]), 64)
|
||||
s.addToken(tokenTypeNumber, literal)
|
||||
s.addToken(TokenTypeNumber, literal)
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (s *scanner) identifier() {
|
||||
func (s *Scanner) identifier() {
|
||||
for isAlphaNumeric(s.peek()) {
|
||||
s.advance()
|
||||
}
|
||||
|
|
@ -255,13 +257,13 @@ func (s *scanner) identifier() {
|
|||
tt, ok := keywords[string(text)]
|
||||
|
||||
if !ok {
|
||||
tt = tokenTypeIdentifier
|
||||
tt = TokenTypeIdentifier
|
||||
}
|
||||
|
||||
s.addToken(tt, nil)
|
||||
}
|
||||
|
||||
func (s *scanner) ScanTokens() ([]*token, bool) {
|
||||
func (s *Scanner) ScanTokens() ([]*Token, bool) {
|
||||
isErr := false
|
||||
|
||||
for !s.isAtEnd() {
|
||||
|
|
@ -269,8 +271,8 @@ func (s *scanner) ScanTokens() ([]*token, bool) {
|
|||
isErr = isErr || s.scanToken()
|
||||
}
|
||||
|
||||
s.tokens = append(s.tokens, &token{
|
||||
Type: tokenTypeEOF,
|
||||
s.tokens = append(s.tokens, &Token{
|
||||
Type: TokenTypeEOF,
|
||||
Lexeme: "",
|
||||
Literal: nil,
|
||||
Line: s.line,
|
||||
91
lexer/tokentype.go
Normal file
91
lexer/tokentype.go
Normal file
|
|
@ -0,0 +1,91 @@
|
|||
package lexer
|
||||
|
||||
import "fmt"
|
||||
|
||||
//go:generate stringer -type TokenType -linecomment -trimprefix TokenType
|
||||
type TokenType int
|
||||
|
||||
const (
|
||||
// single char tokens
|
||||
|
||||
TokenTypeLeftParen TokenType = iota
|
||||
TokenTypeRightParen
|
||||
TokenTypeLeftBrace
|
||||
TokenTypeRightBrace
|
||||
TokenTypeComma
|
||||
TokenTypeDot
|
||||
TokenTypeMinus
|
||||
TokenTypePlus
|
||||
TokenTypeSemicolon
|
||||
TokenTypeSlash
|
||||
TokenTypeStar
|
||||
|
||||
// 1-2 char token
|
||||
|
||||
TokenTypeBang
|
||||
TokenTypeBangEq
|
||||
TokenTypeEqual
|
||||
TokenTypeEqualEqual
|
||||
TokenTypeGreater
|
||||
TokenTypeGreaterEq
|
||||
TokenTypeLess
|
||||
TokenTypeLessEq
|
||||
|
||||
// literals
|
||||
|
||||
TokenTypeIdentifier
|
||||
TokenTypeString
|
||||
TokenTypeNumber
|
||||
|
||||
// keywords
|
||||
|
||||
TokenTypeAnd
|
||||
TokenTypeClass
|
||||
TokenTypeElse
|
||||
TokenTypeFalse
|
||||
TokenTypeFun
|
||||
TokenTypeFor
|
||||
TokenTypeIf
|
||||
TokenTypeNil
|
||||
TokenTypeOr
|
||||
TokenTypePrint
|
||||
TokenTypeReturn
|
||||
TokenTypeSuper
|
||||
TokenTypeThis
|
||||
TokenTypeTrue
|
||||
TokenTypeVar
|
||||
TokenTypeWhile
|
||||
|
||||
TokenTypeEOF
|
||||
)
|
||||
|
||||
var KeywordTokenTypes = []TokenType{
|
||||
TokenTypeClass,
|
||||
TokenTypeFun,
|
||||
TokenTypeVar,
|
||||
TokenTypeFor,
|
||||
TokenTypeIf,
|
||||
TokenTypeReturn,
|
||||
TokenTypeWhile,
|
||||
TokenTypePrint,
|
||||
}
|
||||
|
||||
type Token struct {
|
||||
Type TokenType
|
||||
Lexeme string
|
||||
Literal any
|
||||
Line int
|
||||
}
|
||||
|
||||
func (t Token) String() string {
|
||||
return fmt.Sprintf("%s %s %+v", t.Type, t.Lexeme, t.Literal)
|
||||
}
|
||||
|
||||
func IsKeyword(token *Token) bool {
|
||||
for _, kt := range KeywordTokenTypes {
|
||||
if token.Type == kt {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
91
main.go
91
main.go
|
|
@ -1,91 +0,0 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"flag"
|
||||
"fmt"
|
||||
"os"
|
||||
)
|
||||
|
||||
var (
|
||||
file string
|
||||
)
|
||||
|
||||
func main() {
|
||||
flag.Parse()
|
||||
args := flag.Args()
|
||||
|
||||
if len(args) > 1 {
|
||||
fmt.Printf("Usage: %s [script]", args[0])
|
||||
os.Exit(64)
|
||||
} else if len(args) == 1 {
|
||||
runFile(args[0])
|
||||
} else {
|
||||
repl()
|
||||
}
|
||||
}
|
||||
|
||||
func runFile(filename string) {
|
||||
bs, err := os.ReadFile(filename)
|
||||
if err != nil {
|
||||
fmt.Printf("unable to read file '%s':\n\t%s", filename, err.Error())
|
||||
os.Exit(64)
|
||||
}
|
||||
|
||||
if !run(string(bs)) {
|
||||
os.Exit(65)
|
||||
}
|
||||
}
|
||||
|
||||
func repl() {
|
||||
s := bufio.NewScanner(os.Stdin)
|
||||
for {
|
||||
fmt.Printf("repl> ")
|
||||
s.Scan()
|
||||
text := s.Text()
|
||||
|
||||
if text == ":q" {
|
||||
return
|
||||
}
|
||||
|
||||
run(text)
|
||||
if err := s.Err(); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func run(source string) bool {
|
||||
s := newScanner(source)
|
||||
tokens, ok := s.ScanTokens()
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
|
||||
p := newParser(tokens)
|
||||
expr, err := p.Parse()
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
printer := &astPrinter{}
|
||||
fmt.Println(printer.print(expr))
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func reportErr(line int, message string) {
|
||||
report(line, "", message)
|
||||
}
|
||||
|
||||
func reportSyntaxError(token *token, message string) {
|
||||
if token.Type == tokenTypeEOF {
|
||||
report(token.Line, "at EOF", message)
|
||||
} else {
|
||||
report(token.Line, "at \""+token.Lexeme+"\"", message)
|
||||
}
|
||||
}
|
||||
|
||||
func report(line int, where, message string) {
|
||||
fmt.Printf("[line %d] Error%s: %s\n", line, where, message)
|
||||
}
|
||||
201
parser.go
201
parser.go
|
|
@ -1,201 +0,0 @@
|
|||
package main
|
||||
|
||||
type parseError interface {
|
||||
Error() string
|
||||
Token() *token
|
||||
}
|
||||
|
||||
type syntaxError struct {
|
||||
token *token
|
||||
message string
|
||||
}
|
||||
|
||||
func newSyntaxError(token *token, message string) *syntaxError {
|
||||
return &syntaxError{token, message}
|
||||
}
|
||||
|
||||
func (s *syntaxError) Token() *token {
|
||||
return s.token
|
||||
}
|
||||
|
||||
func (s *syntaxError) Error() string {
|
||||
return s.message
|
||||
}
|
||||
|
||||
type operandFunc func() (expr, error)
|
||||
|
||||
type parser struct {
|
||||
tokens []*token
|
||||
current int
|
||||
}
|
||||
|
||||
func newParser(tokens []*token) *parser {
|
||||
return &parser{
|
||||
tokens: tokens,
|
||||
current: 0,
|
||||
}
|
||||
}
|
||||
|
||||
func (p *parser) Parse() (expr, error) {
|
||||
e, err := p.expression()
|
||||
if err != nil {
|
||||
return e, err
|
||||
}
|
||||
|
||||
return e, err
|
||||
}
|
||||
|
||||
// expression -> equality
|
||||
func (p *parser) expression() (expr, error) {
|
||||
return p.equality()
|
||||
}
|
||||
|
||||
func (p *parser) parseLeftAssocBinOps(operand operandFunc, tokenTypes ...tokenType) (expr, error) {
|
||||
e, err := operand()
|
||||
if err != nil {
|
||||
return e, err
|
||||
}
|
||||
|
||||
for p.match(tokenTypes...) {
|
||||
op := p.previous()
|
||||
r, err := operand()
|
||||
if err != nil {
|
||||
return e, err
|
||||
}
|
||||
e = &binaryExpr{e, op, r}
|
||||
}
|
||||
|
||||
return e, nil
|
||||
}
|
||||
|
||||
// eqality -> comparison ( ("!=" | "==") comparison )*
|
||||
func (p *parser) equality() (expr, error) {
|
||||
return p.parseLeftAssocBinOps(
|
||||
p.comparison,
|
||||
tokenTypeBangEq, tokenTypeEqualEqual,
|
||||
)
|
||||
}
|
||||
|
||||
// comparison -> term ( ( ">" | ">=" | "<" | "<=" ) term )*
|
||||
func (p *parser) comparison() (expr, error) {
|
||||
return p.parseLeftAssocBinOps(
|
||||
p.term,
|
||||
tokenTypeGreater, tokenTypeGreaterEq,
|
||||
tokenTypeLess, tokenTypeLessEq,
|
||||
)
|
||||
}
|
||||
|
||||
// term -> factor ( ( "-" | "+" ) factor )*
|
||||
func (p *parser) term() (expr, error) {
|
||||
return p.parseLeftAssocBinOps(
|
||||
p.factor,
|
||||
tokenTypeMinus, tokenTypePlus,
|
||||
)
|
||||
}
|
||||
|
||||
// factor -> unary ( ( "*" | "/" ) unary )*
|
||||
func (p *parser) factor() (expr, error) {
|
||||
return p.parseLeftAssocBinOps(
|
||||
p.unary,
|
||||
tokenTypeSlash, tokenTypeStar,
|
||||
)
|
||||
}
|
||||
|
||||
// unary -> ( "!" | "-" ) unary | primary;
|
||||
func (p *parser) unary() (expr, error) {
|
||||
if p.match(tokenTypeBang, tokenTypeMinus) {
|
||||
op := p.previous()
|
||||
r, err := p.unary()
|
||||
return &unaryExpr{op, r}, err
|
||||
}
|
||||
|
||||
return p.primary()
|
||||
}
|
||||
|
||||
// primary -> STRING | NUMBER | "true" | "false" | "nil" | "(" expression ")"
|
||||
func (p *parser) primary() (expr, error) {
|
||||
if p.match(tokenTypeTrue) {
|
||||
return &literalExpr{true}, nil
|
||||
}
|
||||
if p.match(tokenTypeFalse) {
|
||||
return &literalExpr{false}, nil
|
||||
}
|
||||
if p.match(tokenTypeNil) {
|
||||
return &literalExpr{nil}, nil
|
||||
}
|
||||
|
||||
if p.match(tokenTypeString, tokenTypeNumber) {
|
||||
return &literalExpr{p.previous().Literal}, nil
|
||||
}
|
||||
|
||||
if p.match(tokenTypeLeftParen) {
|
||||
e, err := p.expression()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
_, err = p.consume(tokenTypeRightParen, "expected ')' after expression")
|
||||
return &groupingExpr{e}, err
|
||||
}
|
||||
|
||||
return nil, newSyntaxError(p.peek(), "expected expression")
|
||||
}
|
||||
|
||||
// consume advances past the current token when it has the expected type;
// otherwise it returns a syntax error carrying msg.
func (p *parser) consume(tokenType tokenType, msg string) (*token, error) {
	if p.check(tokenType) {
		return p.advance(), nil
	}

	return nil, newSyntaxError(p.peek(), msg)
}

// synchronize discards tokens until a likely statement boundary (just past
// a ';' or just before a statement keyword) so parsing can resume after a
// syntax error.
func (p *parser) synchronize() {
	p.advance()

	for !p.isAtEnd() {
		if p.previous().Type == tokenTypeSemicolon {
			return
		}

		if isKeyword(p.peek()) {
			return
		}
		p.advance()
	}
}

// match reports whether the current token has one of the given types,
// consuming it when it does.
func (p *parser) match(tokenTypes ...tokenType) bool {
	for _, t := range tokenTypes {
		if p.check(t) {
			p.advance()
			return true
		}
	}

	return false
}

// check reports whether the current token has type t, without consuming it.
func (p *parser) check(t tokenType) bool {
	if p.isAtEnd() {
		return false
	}
	return p.peek().Type == t
}

// advance consumes and returns the current token; at EOF it keeps
// returning the final token instead of running off the slice.
func (p *parser) advance() *token {
	if !p.isAtEnd() {
		p.current += 1
	}
	return p.previous()
}

// isAtEnd reports whether the parser has reached the EOF token.
func (p *parser) isAtEnd() bool {
	return p.peek().Type == tokenTypeEOF
}

// peek returns the current, not yet consumed token.
func (p *parser) peek() *token {
	return p.tokens[p.current]
}

// previous returns the most recently consumed token.
func (p *parser) previous() *token {
	return p.tokens[p.current-1]
}
|
||||
206
parser/parser.go
Normal file
206
parser/parser.go
Normal file
|
|
@ -0,0 +1,206 @@
|
|||
package parser
|
||||
|
||||
import (
|
||||
"git.red-panda.pet/pandaware/lox-go/ast"
|
||||
"git.red-panda.pet/pandaware/lox-go/lexer"
|
||||
)
|
||||
|
||||
// ParseError is implemented by every error the parser produces; Token
// reports the token at which the error was detected.
type ParseError interface {
	Error() string
	Token() *lexer.Token
}

// SyntaxError is a ParseError describing a malformed construct in the
// token stream.
type SyntaxError struct {
	token   *lexer.Token
	message string
}
|
||||
|
||||
func newSyntaxError(token *lexer.Token, message string) *SyntaxError {
|
||||
return &SyntaxError{token, message}
|
||||
}
|
||||
|
||||
func (s *SyntaxError) Token() *lexer.Token {
|
||||
return s.token
|
||||
}
|
||||
|
||||
func (s *SyntaxError) Error() string {
|
||||
return s.message
|
||||
}
|
||||
|
||||
// operandFunc parses one operand of a binary-operator chain.
type operandFunc func() (ast.Expr, error)

// Parser is a recursive-descent parser over a token stream.
type Parser struct {
	tokens  []*lexer.Token // input, terminated by an EOF token
	current int            // index of the next token to consume
}
|
||||
|
||||
func New(tokens []*lexer.Token) *Parser {
|
||||
return &Parser{
|
||||
tokens: tokens,
|
||||
current: 0,
|
||||
}
|
||||
}
|
||||
|
||||
func (p *Parser) Parse() (ast.Expr, error) {
|
||||
e, err := p.expression()
|
||||
if err != nil {
|
||||
return e, err
|
||||
}
|
||||
|
||||
return e, err
|
||||
}
|
||||
|
||||
// expression -> equality
//
// expression is the grammar's start symbol; it currently delegates
// straight to the lowest-precedence rule.
func (p *Parser) expression() (ast.Expr, error) {
	return p.equality()
}
|
||||
|
||||
func (p *Parser) parseLeftAssocBinOps(operand operandFunc, tokenTypes ...lexer.TokenType) (ast.Expr, error) {
|
||||
e, err := operand()
|
||||
if err != nil {
|
||||
return e, err
|
||||
}
|
||||
|
||||
for p.match(tokenTypes...) {
|
||||
op := p.previous()
|
||||
r, err := operand()
|
||||
if err != nil {
|
||||
return e, err
|
||||
}
|
||||
e = &ast.BinaryExpr{Left: e, Operator: op, Right: r}
|
||||
}
|
||||
|
||||
return e, nil
|
||||
}
|
||||
|
||||
// eqality -> comparison ( ("!=" | "==") comparison )*
|
||||
func (p *Parser) equality() (ast.Expr, error) {
|
||||
return p.parseLeftAssocBinOps(
|
||||
p.comparison,
|
||||
lexer.TokenTypeBangEq, lexer.TokenTypeEqualEqual,
|
||||
)
|
||||
}
|
||||
|
||||
// comparison -> term ( ( ">" | ">=" | "<" | "<=" ) term )*
|
||||
func (p *Parser) comparison() (ast.Expr, error) {
|
||||
return p.parseLeftAssocBinOps(
|
||||
p.term,
|
||||
lexer.TokenTypeGreater, lexer.TokenTypeGreaterEq,
|
||||
lexer.TokenTypeLess, lexer.TokenTypeLessEq,
|
||||
)
|
||||
}
|
||||
|
||||
// term -> factor ( ( "-" | "+" ) factor )*
|
||||
func (p *Parser) term() (ast.Expr, error) {
|
||||
return p.parseLeftAssocBinOps(
|
||||
p.factor,
|
||||
lexer.TokenTypeMinus, lexer.TokenTypePlus,
|
||||
)
|
||||
}
|
||||
|
||||
// factor -> unary ( ( "*" | "/" ) unary )*
|
||||
func (p *Parser) factor() (ast.Expr, error) {
|
||||
return p.parseLeftAssocBinOps(
|
||||
p.unary,
|
||||
lexer.TokenTypeSlash, lexer.TokenTypeStar,
|
||||
)
|
||||
}
|
||||
|
||||
// unary -> ( "!" | "-" ) unary | primary;
|
||||
func (p *Parser) unary() (ast.Expr, error) {
|
||||
if p.match(lexer.TokenTypeBang, lexer.TokenTypeMinus) {
|
||||
op := p.previous()
|
||||
r, err := p.unary()
|
||||
return &ast.UnaryExpr{Operator: op, Right: r}, err
|
||||
}
|
||||
|
||||
return p.primary()
|
||||
}
|
||||
|
||||
// primary -> STRING | NUMBER | "true" | "false" | "nil" | "(" expression ")"
|
||||
func (p *Parser) primary() (ast.Expr, error) {
|
||||
if p.match(lexer.TokenTypeTrue) {
|
||||
return &ast.LiteralExpr{Value: true}, nil
|
||||
}
|
||||
if p.match(lexer.TokenTypeFalse) {
|
||||
return &ast.LiteralExpr{Value: false}, nil
|
||||
}
|
||||
if p.match(lexer.TokenTypeNil) {
|
||||
return &ast.LiteralExpr{Value: nil}, nil
|
||||
}
|
||||
|
||||
if p.match(lexer.TokenTypeString, lexer.TokenTypeNumber) {
|
||||
return &ast.LiteralExpr{Value: p.previous().Literal}, nil
|
||||
}
|
||||
|
||||
if p.match(lexer.TokenTypeLeftParen) {
|
||||
e, err := p.expression()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
_, err = p.consume(lexer.TokenTypeRightParen, "expected ')' after expression")
|
||||
return &ast.GroupingExpr{Expr: e}, err
|
||||
}
|
||||
|
||||
return nil, newSyntaxError(p.peek(), "expected expression")
|
||||
}
|
||||
|
||||
func (p *Parser) consume(tokenType lexer.TokenType, msg string) (*lexer.Token, error) {
|
||||
if p.check(tokenType) {
|
||||
return p.advance(), nil
|
||||
}
|
||||
|
||||
return nil, newSyntaxError(p.peek(), msg)
|
||||
}
|
||||
|
||||
// synchronize discards tokens until a likely statement boundary — just
// past a ';' or just before a statement keyword — so the parser can keep
// going after a syntax error and report more than one problem per run.
func (p *Parser) synchronize() {
	p.advance()

	for !p.isAtEnd() {
		// a semicolon we just passed ends the broken statement
		if p.previous().Type == lexer.TokenTypeSemicolon {
			return
		}

		// a statement keyword ahead is a fresh place to resume
		if lexer.IsKeyword(p.peek()) {
			return
		}
		p.advance()
	}
}
|
||||
|
||||
func (p *Parser) match(tokenTypes ...lexer.TokenType) bool {
|
||||
for _, t := range tokenTypes {
|
||||
if p.check(t) {
|
||||
p.advance()
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (p *Parser) check(t lexer.TokenType) bool {
|
||||
if p.isAtEnd() {
|
||||
return false
|
||||
}
|
||||
return p.peek().Type == t
|
||||
}
|
||||
|
||||
func (p *Parser) advance() *lexer.Token {
|
||||
if !p.isAtEnd() {
|
||||
p.current += 1
|
||||
}
|
||||
return p.previous()
|
||||
}
|
||||
|
||||
// isAtEnd reports whether the parser has reached the EOF token.
func (p *Parser) isAtEnd() bool {
	return p.peek().Type == lexer.TokenTypeEOF
}

// peek returns the current, not yet consumed token.
func (p *Parser) peek() *lexer.Token {
	return p.tokens[p.current]
}

// previous returns the most recently consumed token.
func (p *Parser) previous() *lexer.Token {
	return p.tokens[p.current-1]
}
|
||||
71
repl/main.go
Normal file
71
repl/main.go
Normal file
|
|
@ -0,0 +1,71 @@
|
|||
package repl
|
||||
|
||||
import (
|
||||
"git.red-panda.pet/pandaware/lox-go/reporter"
|
||||
"github.com/charmbracelet/bubbles/textinput"
|
||||
tea "github.com/charmbracelet/bubbletea"
|
||||
"github.com/charmbracelet/log"
|
||||
)
|
||||
|
||||
// REPL is a bubbletea model providing an interactive line-based
// read-eval-print loop.
type REPL struct {
	// input is the single-line prompt the user types into.
	input textinput.Model

	// history holds previously entered lines, bounded by historySize.
	// NOTE(review): only written at construction so far — confirm how
	// consumers use it.
	history     []string
	historySize uint
}
|
||||
|
||||
// NewREPL constructs a REPL model with an empty, focused input field.
// Debug-level reporting is enabled so diagnostics show up in the session.
func NewREPL(historySize uint) REPL {
	reporter.SetLevel(reporter.LevelDebug)

	inputModel := textinput.New()
	inputModel.Focus()
	inputModel.Prompt = "repl> "
	inputModel.Placeholder = "..."

	return REPL{
		// NOTE(review): this allocates historySize empty strings (length,
		// not capacity). make([]string, 0, historySize) is likely intended
		// if history is appended to — confirm against its consumers.
		history:     make([]string, historySize),
		historySize: historySize,
		input:       inputModel,
	}
}
|
||||
|
||||
func (r REPL) Init() tea.Cmd {
|
||||
return textinput.Blink
|
||||
}
|
||||
|
||||
func (r REPL) handleKey(msg tea.KeyMsg) (REPL, tea.Cmd) {
|
||||
switch msg.Type {
|
||||
case tea.KeyEnter, tea.KeyCtrlC, tea.KeyEscape:
|
||||
return r, tea.Quit
|
||||
}
|
||||
return r, nil
|
||||
}
|
||||
|
||||
func (r REPL) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
|
||||
var cmd tea.Cmd
|
||||
|
||||
switch m := msg.(type) {
|
||||
case tea.KeyMsg:
|
||||
r, cmd = r.handleKey(m)
|
||||
if cmd != nil {
|
||||
return r, cmd
|
||||
}
|
||||
}
|
||||
|
||||
r.input, cmd = r.input.Update(msg)
|
||||
|
||||
return r, nil
|
||||
}
|
||||
|
||||
func (r REPL) View() string {
|
||||
return r.input.View()
|
||||
}
|
||||
|
||||
func Run(r REPL) error {
|
||||
p := tea.NewProgram(r)
|
||||
if _, err := p.Run(); err != nil {
|
||||
log.Error("error in REPL", "err", err)
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
27
reporter/level_string.go
Normal file
27
reporter/level_string.go
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
// Code generated by "stringer -type Level -trimprefix Level"; DO NOT EDIT.

package reporter

import "strconv"

func _() {
	// An "invalid array index" compiler error signifies that the constant values have changed.
	// Re-run the stringer command to generate them again.
	var x [1]struct{}
	_ = x[LevelDebug-0]
	_ = x[LevelInfo-1]
	_ = x[LevelWarn-2]
	_ = x[LevelError-3]
	_ = x[LevelFatal-4]
}

const _Level_name = "DebugInfoWarnErrorFatal"

var _Level_index = [...]uint8{0, 5, 9, 13, 18, 23}

// String returns the level's name ("Debug", "Info", ...), or "Level(N)"
// for out-of-range values.
func (i Level) String() string {
	if i >= Level(len(_Level_index)-1) {
		return "Level(" + strconv.FormatInt(int64(i), 10) + ")"
	}
	return _Level_name[_Level_index[i]:_Level_index[i+1]]
}
|
||||
117
reporter/main.go
Normal file
117
reporter/main.go
Normal file
|
|
@ -0,0 +1,117 @@
|
|||
package reporter
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"strconv"
|
||||
|
||||
"git.red-panda.pet/pandaware/lipgloss-catppuccin"
|
||||
"github.com/charmbracelet/lipgloss"
|
||||
)
|
||||
|
||||
var (
	// colors is the catppuccin palette every style below draws from.
	colors = catppuccin.Macchiato

	lineStyle    = lipgloss.NewStyle().Underline(true) // line numbers in context
	contextStyle = lipgloss.NewStyle().Foreground(colors.Text)
	bracketStyle = lipgloss.NewStyle().Foreground(colors.Overlay2)

	messageStyle = lipgloss.NewStyle().Foreground(colors.Text)

	// levelTagStyles colors the [Level] tag per severity.
	levelTagStyles = map[Level]lipgloss.Style{
		LevelError: lipgloss.NewStyle().Foreground(colors.Red),
		LevelInfo:  lipgloss.NewStyle().Foreground(colors.Teal),
		LevelDebug: lipgloss.NewStyle().Foreground(colors.Green),
		LevelWarn:  lipgloss.NewStyle().Foreground(colors.Yellow),
		LevelFatal: lipgloss.NewStyle().Foreground(colors.Mauve),
	}
)
||||
|
||||
// Level is the severity of a reporter message, ordered from least severe
// (LevelDebug) to most severe (LevelFatal).
//
//go:generate stringer -type Level -trimprefix Level
type Level uint8

const (
	LevelDebug Level = iota
	LevelInfo
	LevelWarn
	LevelError
	LevelFatal
)
|
||||
|
||||
// currentLevel is the minimum severity Log will print.
// NOTE(review): package-level and unsynchronized — fine if set once during
// startup; confirm there are no concurrent SetLevel calls.
var currentLevel = LevelWarn

// SetLevel sets the minimum severity that will be printed.
func SetLevel(level Level) {
	currentLevel = level
}
|
||||
|
||||
func bracketed(content, bracketsStr string) string {
|
||||
brackets := []rune(bracketsStr)
|
||||
if len(brackets) != 2 {
|
||||
panic("invariant violation")
|
||||
}
|
||||
open := string(brackets[0])
|
||||
close := string(brackets[1])
|
||||
return bracketStyle.Render(open) + content + bracketStyle.Render(close)
|
||||
}
|
||||
|
||||
// Log prints a single formatted diagnostic line of the shape
//
//	[Level] <component> (module at line N): message
//
// Messages below the current reporting level are dropped. line <= 0
// suppresses the "at line" suffix; an empty module renders as "<nil>".
func Log(level Level, component, module string, line int, message string) {
	if currentLevel > level {
		return
	}

	ctx := module

	if module == "" {
		ctx = "<nil>"
	}

	if line > 0 {
		ctx += " at line " + lineStyle.Render(
			strconv.Itoa(line),
		)
	}

	levelStr := level.String()

	// colorize the level tag when a style is registered for it
	if style, ok := levelTagStyles[level]; ok {
		levelStr = style.Render(levelStr)
	}

	fmt.Printf("%s %s %s: %s\n",
		bracketed(levelStr, "[]"),
		bracketed(contextStyle.Render(component), "<>"),
		bracketed(contextStyle.Render(ctx), "()"),
		messageStyle.Render(message),
	)
}
|
||||
|
||||
// Debug logs message at LevelDebug.
func Debug(line int, component, module, message string) {
	Log(LevelDebug, component, module, line, message)
}

// Info logs message at LevelInfo.
func Info(line int, component, module, message string) {
	Log(LevelInfo, component, module, line, message)
}

// Warn logs message at LevelWarn.
func Warn(line int, component, module, message string) {
	Log(LevelWarn, component, module, line, message)
}

// Error logs message at LevelError.
func Error(line int, component, module, message string) {
	Log(LevelError, component, module, line, message)
}
|
||||
|
||||
// Fatal logs message at LevelFatal and then terminates the process with
// exitCode. It never returns.
func Fatal(exitCode, line int, component, module, message string) {
	Log(LevelFatal, component, module, line, message)
	os.Exit(exitCode)
}
|
||||
|
||||
// Err reports an error with no known component or location context.
//
// Deprecated: Use `Error` instead
func Err(line int, message string) {
	Report(line, "unknown", message)
}

// Report reports an error attributed to where.
//
// Deprecated: Use `Error` instead
func Report(line int, where, message string) {
	Error(line, "unknown", where, message)
}
|
||||
43
run.go
Normal file
43
run.go
Normal file
|
|
@ -0,0 +1,43 @@
|
|||
package loxgo
|
||||
|
||||
import (
	"errors"
	"fmt"
	"os"

	"git.red-panda.pet/pandaware/lox-go/ast"
	"git.red-panda.pet/pandaware/lox-go/lexer"
	"git.red-panda.pet/pandaware/lox-go/parser"
	"git.red-panda.pet/pandaware/lox-go/reporter"
)
|
||||
|
||||
func RunFile(filename string) error {
|
||||
bs, err := os.ReadFile(filename)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := Run(string(bs)); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func Run(source string) error {
|
||||
s := lexer.New(source)
|
||||
tokens, ok := s.ScanTokens()
|
||||
if !ok {
|
||||
return errors.New("lexer error")
|
||||
}
|
||||
|
||||
p := parser.New(tokens)
|
||||
expr, err := p.Parse()
|
||||
if err != nil {
|
||||
return errors.New("parser error")
|
||||
}
|
||||
|
||||
printer := &ast.Printer{}
|
||||
reporter.Debug(0, "astPrinter", "stdin", printer.Print(expr))
|
||||
|
||||
return nil
|
||||
}
|
||||
31
runtime/interpreter.go
Normal file
31
runtime/interpreter.go
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
package runtime
|
||||
|
||||
import "git.red-panda.pet/pandaware/lox-go/ast"
|
||||
|
||||
// Interpreter evaluates Lox ASTs. All visitor methods are currently stubs.
type Interpreter struct{}

// compile-time check that Interpreter satisfies the visitor interface
var _ ast.ExprVisitor = new(Interpreter)
|
||||
|
||||
func NewInterpreter() *Interpreter {
|
||||
return new(Interpreter)
|
||||
}
|
||||
|
||||
// VisitBinaryExpr implements ast.ExprVisitor.
// TODO: evaluate both operands and apply the operator.
func (i *Interpreter) VisitBinaryExpr(b *ast.BinaryExpr) any {
	panic("unimplemented")
}

// VisitGroupingExpr implements ast.ExprVisitor.
// TODO: evaluate the inner expression.
func (i *Interpreter) VisitGroupingExpr(g *ast.GroupingExpr) any {
	panic("unimplemented")
}

// VisitLiteralExpr implements ast.ExprVisitor.
// TODO: return the literal's value.
func (i *Interpreter) VisitLiteralExpr(g *ast.LiteralExpr) any {
	panic("unimplemented")
}

// VisitUnaryExpr implements ast.ExprVisitor.
// TODO: evaluate the operand and apply the operator.
func (i *Interpreter) VisitUnaryExpr(g *ast.UnaryExpr) any {
	panic("unimplemented")
}
|
||||
33
runtime/types.go
Normal file
33
runtime/types.go
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
package runtime
|
||||
|
||||
// LoxType is the dynamic type tag of a runtime Lox value.
//
//go:generate stringer -type LoxType -trimprefix LoxType
type LoxType int

const (
	LoxTypeString LoxType = iota
	LoxTypeNumber
	LoxTypeBoolean
	LoxTypeNil
	LoxTypeUndefined // raw Go value with no Lox counterpart
)

// LoxValue wraps a raw Go value carried through the interpreter.
type LoxValue struct {
	raw any
}
|
||||
|
||||
func (v *LoxValue) Type() LoxType {
|
||||
switch v.raw.(type) {
|
||||
case string:
|
||||
return LoxTypeString
|
||||
case float64, float32:
|
||||
return LoxTypeNumber
|
||||
case bool:
|
||||
return LoxTypeBoolean
|
||||
default:
|
||||
return LoxTypeUndefined
|
||||
}
|
||||
}
|
||||
|
||||
func Value(v any) *LoxValue {
|
||||
return &LoxValue{v}
|
||||
}
|
||||
91
tokentype.go
91
tokentype.go
|
|
@ -1,91 +0,0 @@
|
|||
package main
|
||||
|
||||
import "fmt"
|
||||
|
||||
//go:generate stringer -type tokenType -linecomment -trimprefix tokenType

// tokenType enumerates every kind of lexeme the scanner can produce.
type tokenType int

const (
	// single char tokens

	tokenTypeLeftParen tokenType = iota
	tokenTypeRightParen
	tokenTypeLeftBrace
	tokenTypeRightBrace
	tokenTypeComma
	tokenTypeDot
	tokenTypeMinus
	tokenTypePlus
	tokenTypeSemicolon
	tokenTypeSlash
	tokenTypeStar

	// 1-2 char token

	tokenTypeBang
	tokenTypeBangEq
	tokenTypeEqual
	tokenTypeEqualEqual
	tokenTypeGreater
	tokenTypeGreaterEq
	tokenTypeLess
	tokenTypeLessEq

	// literals

	tokenTypeIdentifier
	tokenTypeString
	tokenTypeNumber

	// keywords

	tokenTypeAnd
	tokenTypeClass
	tokenTypeElse
	tokenTypeFalse
	tokenTypeFun
	tokenTypeFor
	tokenTypeIf
	tokenTypeNil
	tokenTypeOr
	tokenTypePrint
	tokenTypeReturn
	tokenTypeSuper
	tokenTypeThis
	tokenTypeTrue
	tokenTypeVar
	tokenTypeWhile

	tokenTypeEOF
)

// keywordTokenTypes lists the statement-starting keywords that the
// parser's synchronize step resumes at.
var keywordTokenTypes = []tokenType{
	tokenTypeClass,
	tokenTypeFun,
	tokenTypeVar,
	tokenTypeFor,
	tokenTypeIf,
	tokenTypeReturn,
	tokenTypeWhile,
	tokenTypePrint,
}
|
||||
|
||||
// token is a single lexeme together with its source line and, for strings
// and numbers, the decoded literal value.
type token struct {
	Type    tokenType
	Lexeme  string
	Literal any
	Line    int
}

// String renders the token for debugging output.
func (t token) String() string {
	return fmt.Sprintf("%s %s %+v", t.Type, t.Lexeme, t.Literal)
}

// isKeyword reports whether token is one of the statement-starting
// keywords in keywordTokenTypes.
func isKeyword(token *token) bool {
	for _, kt := range keywordTokenTypes {
		if token.Type == kt {
			return true
		}
	}
	return false
}
|
||||
|
|
@ -1,61 +0,0 @@
|
|||
// Code generated by "stringer -type tokenType -linecomment -trimprefix tokenType"; DO NOT EDIT.

package main

import "strconv"

func _() {
	// An "invalid array index" compiler error signifies that the constant values have changed.
	// Re-run the stringer command to generate them again.
	var x [1]struct{}
	_ = x[tokenTypeLeftParen-0]
	_ = x[tokenTypeRightParen-1]
	_ = x[tokenTypeLeftBrace-2]
	_ = x[tokenTypeRightBrace-3]
	_ = x[tokenTypeComma-4]
	_ = x[tokenTypeDot-5]
	_ = x[tokenTypeMinus-6]
	_ = x[tokenTypePlus-7]
	_ = x[tokenTypeSemicolon-8]
	_ = x[tokenTypeSlash-9]
	_ = x[tokenTypeStar-10]
	_ = x[tokenTypeBang-11]
	_ = x[tokenTypeBangEq-12]
	_ = x[tokenTypeEqual-13]
	_ = x[tokenTypeEqualEqual-14]
	_ = x[tokenTypeGreater-15]
	_ = x[tokenTypeGreaterEq-16]
	_ = x[tokenTypeLess-17]
	_ = x[tokenTypeLessEq-18]
	_ = x[tokenTypeIdentifier-19]
	_ = x[tokenTypeString-20]
	_ = x[tokenTypeNumber-21]
	_ = x[tokenTypeAnd-22]
	_ = x[tokenTypeClass-23]
	_ = x[tokenTypeElse-24]
	_ = x[tokenTypeFalse-25]
	_ = x[tokenTypeFun-26]
	_ = x[tokenTypeFor-27]
	_ = x[tokenTypeIf-28]
	_ = x[tokenTypeNil-29]
	_ = x[tokenTypeOr-30]
	_ = x[tokenTypePrint-31]
	_ = x[tokenTypeReturn-32]
	_ = x[tokenTypeSuper-33]
	_ = x[tokenTypeThis-34]
	_ = x[tokenTypeTrue-35]
	_ = x[tokenTypeVar-36]
	_ = x[tokenTypeWhile-37]
	_ = x[tokenTypeEOF-38]
}

const _tokenType_name = "LeftParenRightParenLeftBraceRightBraceCommaDotMinusPlusSemicolonSlashStarBangBangEqEqualEqualEqualGreaterGreaterEqLessLessEqIdentifierStringNumberAndClassElseFalseFunForIfNilOrPrintReturnSuperThisTrueVarWhileEOF"

var _tokenType_index = [...]uint8{0, 9, 19, 28, 38, 43, 46, 51, 55, 64, 69, 73, 77, 83, 88, 98, 105, 114, 118, 124, 134, 140, 146, 149, 154, 158, 163, 166, 169, 171, 174, 176, 181, 187, 192, 196, 200, 203, 208, 211}

// String returns the token type's trimmed name ("LeftParen", ...), or
// "tokenType(N)" for out-of-range values.
func (i tokenType) String() string {
	if i < 0 || i >= tokenType(len(_tokenType_index)-1) {
		return "tokenType(" + strconv.FormatInt(int64(i), 10) + ")"
	}
	return _tokenType_name[_tokenType_index[i]:_tokenType_index[i+1]]
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue