holy carp it parses

This commit is contained in:
basil 2025-06-07 21:16:05 -04:00
parent dc89e81cc5
commit edcbfde351
Signed by: basil
SSH key fingerprint: SHA256:y04xIFL/yqNaG9ae9Vl95vELtHfApGAIoOGLeVLP/fE
3 changed files with 240 additions and 6 deletions

25
main.go
View file

@ -29,10 +29,10 @@ func runFile(filename string) {
bs, err := os.ReadFile(filename)
if err != nil {
fmt.Printf("unable to read file '%s':\n\t%s", filename, err.Error())
os.Exit(1)
os.Exit(64)
}
if run(string(bs)) {
if !run(string(bs)) {
os.Exit(65)
}
}
@ -59,20 +59,33 @@ func run(source string) bool {
s := newScanner(source)
tokens, ok := s.ScanTokens()
if !ok {
return true
return false
}
for _, token := range tokens {
fmt.Println(token)
p := newParser(tokens)
expr, err := p.Parse()
if err != nil {
return false
}
return false
printer := &astPrinter{}
fmt.Println(printer.print(expr))
return true
}
// reportErr reports a scan-time error at the given source line.
func reportErr(line int, message string) {
	report(line, "", message)
}

// reportSyntaxError reports a parse error at the offending token's line,
// quoting the token's lexeme, or "at EOF" for the end-of-input token.
func reportSyntaxError(token *token, message string) {
	if token.Type == tokenTypeEOF {
		report(token.Line, "at EOF", message)
	} else {
		report(token.Line, "at \""+token.Lexeme+"\"", message)
	}
}

// report prints a formatted "[line N] Error…" message to stdout.
func report(line int, where, message string) {
	fmt.Printf("[line %d] Error%s: %s\n", line, where, message)
}

201
parser.go Normal file
View file

@ -0,0 +1,201 @@
package main
// parseError is the interface satisfied by errors produced during
// parsing; Token exposes the token at which the error was detected.
type parseError interface {
	Error() string
	Token() *token
}

// syntaxError is the concrete parse error created by the parser.
type syntaxError struct {
	token   *token // token at which the error was detected
	message string // human-readable description
}

// newSyntaxError returns a *syntaxError for token with the given message.
func newSyntaxError(token *token, message string) *syntaxError {
	return &syntaxError{token, message}
}

// Token returns the token at which the error was detected.
func (s *syntaxError) Token() *token {
	return s.token
}

// Error implements the error interface.
func (s *syntaxError) Error() string {
	return s.message
}
// operandFunc parses one operand of a left-associative binary-operator chain.
type operandFunc func() (expr, error)

// parser is a recursive-descent parser over a scanned token slice.
type parser struct {
	tokens  []*token // tokens to parse; expected to end with an EOF token
	current int      // index of the next token to consume
}

// newParser returns a parser positioned at the first of tokens.
func newParser(tokens []*token) *parser {
	return &parser{
		tokens:  tokens,
		current: 0,
	}
}
// Parse parses the token stream as a single expression and returns the
// resulting AST, or a syntax error describing the first parse failure.
func (p *parser) Parse() (expr, error) {
	// expression already returns (expr, error); the previous
	// `if err != nil { return e, err }` branch was redundant.
	return p.expression()
}
// expression -> equality
func (p *parser) expression() (expr, error) {
	e, err := p.equality()
	return e, err
}
// parseLeftAssocBinOps parses a left-associative chain of binary
// operators: operand ( op operand )*. Each matched operator folds the
// accumulated expression into a new binaryExpr, so "a + b + c" parses
// as "(a + b) + c".
func (p *parser) parseLeftAssocBinOps(operand operandFunc, tokenTypes ...tokenType) (expr, error) {
	e, err := operand()
	if err != nil {
		// On a non-nil error the expr result is meaningless; return
		// nil rather than a partially-built expression.
		return nil, err
	}
	for p.match(tokenTypes...) {
		op := p.previous()
		r, err := operand()
		if err != nil {
			return nil, err
		}
		e = &binaryExpr{e, op, r}
	}
	return e, nil
}
// equality -> comparison ( ( "!=" | "==" ) comparison )*
func (p *parser) equality() (expr, error) {
	return p.parseLeftAssocBinOps(p.comparison, tokenTypeBangEq, tokenTypeEqualEqual)
}
// comparison -> term ( ( ">" | ">=" | "<" | "<=" ) term )*
func (p *parser) comparison() (expr, error) {
	ops := []tokenType{tokenTypeGreater, tokenTypeGreaterEq, tokenTypeLess, tokenTypeLessEq}
	return p.parseLeftAssocBinOps(p.term, ops...)
}
// term -> factor ( ( "-" | "+" ) factor )*
func (p *parser) term() (expr, error) {
	return p.parseLeftAssocBinOps(p.factor, tokenTypeMinus, tokenTypePlus)
}
// factor -> unary ( ( "*" | "/" ) unary )*
func (p *parser) factor() (expr, error) {
	return p.parseLeftAssocBinOps(p.unary, tokenTypeSlash, tokenTypeStar)
}
// unary -> ( "!" | "-" ) unary | primary
func (p *parser) unary() (expr, error) {
	if p.match(tokenTypeBang, tokenTypeMinus) {
		op := p.previous()
		r, err := p.unary()
		if err != nil {
			// Previously returned a unaryExpr wrapping a meaningless
			// operand alongside the error; on error, return nil.
			return nil, err
		}
		return &unaryExpr{op, r}, nil
	}
	return p.primary()
}
// primary -> STRING | NUMBER | "true" | "false" | "nil" | "(" expression ")"
func (p *parser) primary() (expr, error) {
	switch {
	case p.match(tokenTypeTrue):
		return &literalExpr{true}, nil
	case p.match(tokenTypeFalse):
		return &literalExpr{false}, nil
	case p.match(tokenTypeNil):
		return &literalExpr{nil}, nil
	case p.match(tokenTypeString, tokenTypeNumber):
		return &literalExpr{p.previous().Literal}, nil
	case p.match(tokenTypeLeftParen):
		e, err := p.expression()
		if err != nil {
			return nil, err
		}
		// Previously returned the groupingExpr together with consume's
		// error; on error the expr result must be nil.
		if _, err := p.consume(tokenTypeRightParen, "expected ')' after expression"); err != nil {
			return nil, err
		}
		return &groupingExpr{e}, nil
	}
	return nil, newSyntaxError(p.peek(), "expected expression")
}
// consume advances past the next token if it has the given type;
// otherwise it returns a syntax error with msg at the current token.
func (p *parser) consume(tokenType tokenType, msg string) (*token, error) {
	if !p.check(tokenType) {
		return nil, newSyntaxError(p.peek(), msg)
	}
	return p.advance(), nil
}
// synchronize discards tokens until a likely statement boundary — just
// past a semicolon, or just before a keyword — so parsing can resume
// after a syntax error.
func (p *parser) synchronize() {
	for p.advance(); !p.isAtEnd(); p.advance() {
		if p.previous().Type == tokenTypeSemicolon || isKeyword(p.peek()) {
			return
		}
	}
}
// match consumes the next token and reports true if its type is one of
// tokenTypes; otherwise it consumes nothing and reports false.
func (p *parser) match(tokenTypes ...tokenType) bool {
	for _, candidate := range tokenTypes {
		if !p.check(candidate) {
			continue
		}
		p.advance()
		return true
	}
	return false
}
// check reports whether the next token has type t without consuming it;
// it is always false at end of input.
func (p *parser) check(t tokenType) bool {
	return !p.isAtEnd() && p.peek().Type == t
}
// advance consumes and returns the next token; at end of input it
// stays put and returns the last consumed token.
func (p *parser) advance() *token {
	if p.isAtEnd() {
		return p.previous()
	}
	p.current++
	return p.previous()
}
// isAtEnd reports whether the cursor sits on the trailing EOF token.
func (p *parser) isAtEnd() bool {
	return p.tokens[p.current].Type == tokenTypeEOF
}

// peek returns the token under the cursor without consuming it.
func (p *parser) peek() *token {
	return p.tokens[p.current]
}

// previous returns the most recently consumed token.
func (p *parser) previous() *token {
	i := p.current - 1
	return p.tokens[i]
}

View file

@ -59,6 +59,17 @@ const (
tokenTypeEOF
)
// keywordTokenTypes lists the statement-keyword token types recognized
// by isKeyword (used by the parser's synchronize step to find a likely
// statement boundary).
var keywordTokenTypes = []tokenType{
	tokenTypeClass,
	tokenTypeFun,
	tokenTypeVar,
	tokenTypeFor,
	tokenTypeIf,
	tokenTypeReturn,
	tokenTypeWhile,
	tokenTypePrint,
}
type token struct {
Type tokenType
Lexeme string
@ -69,3 +80,12 @@ type token struct {
// String renders the token as "TYPE LEXEME LITERAL" for debug output.
func (t token) String() string {
	return fmt.Sprintf("%s %s %+v", t.Type, t.Lexeme, t.Literal)
}
// isKeyword reports whether token's type is one of keywordTokenTypes.
func isKeyword(token *token) bool {
	for i := range keywordTokenTypes {
		if keywordTokenTypes[i] == token.Type {
			return true
		}
	}
	return false
}