Compare commits

...

2 commits

8 changed files with 257 additions and 39 deletions

56
pj1-go/ast-print.go Normal file
View file

@@ -0,0 +1,56 @@
package main
import (
"fmt"
"strings"
)
// print renders expr as a fully parenthesized, Lisp-style string,
// e.g. (* (- 123) (group 45.67)).
func print(expr Expr) string {
	switch node := expr.(type) {
	case Literal:
		// A nil literal is spelled "nil"; anything else is printed
		// with its default %v formatting.
		if node.value == nil {
			return "nil"
		}
		return fmt.Sprintf("%v", node.value)
	case Grouping:
		return parenthesize("group", node.expression)
	case Unary:
		return parenthesize(node.operator.Lexeme, node.right)
	case Binary:
		return parenthesize(node.operator.Lexeme, node.left, node.right)
	default:
		return "ERROR: reached impossible branch in print function"
	}
}
// parenthesize wraps name and the printed forms of exprs in a single
// pair of parentheses, separated by spaces: "(name a b ...)".
func parenthesize(name string, exprs ...Expr) string {
	parts := make([]string, 0, len(exprs)+1)
	parts = append(parts, name)
	for _, e := range exprs {
		parts = append(parts, print(e))
	}
	return "(" + strings.Join(parts, " ") + ")"
}
// This was briefly a testing function for the AST class at the end of ch. 5
/*
func main() {
var expression Expr = Binary{
Unary{
lexer.NewToken(lexer.MINUS, "-", nil, 1),
Literal{123},
},
lexer.NewToken(lexer.STAR, "*", nil, 1),
Grouping{
Literal{45.67},
},
}
fmt.Println(print(expression))
}
*/

36
pj1-go/ast.go Normal file
View file

@@ -0,0 +1,36 @@
// Code generated by tools/gen-ast.go DO NOT EDIT.
package main
import "git.bonsai.cool/kayprish/pj1/pj1-go/lexer"
// NOTE(review): this file is generated by gen-ast.go; doc comments added
// here will be lost on regeneration — consider emitting them from the
// generator instead.

// Expr is implemented by every expression node of the AST. The
// unexported marker method limits implementations to this package.
type Expr interface {
	isExpr()
}

// Binary is an infix operator application: left <operator> right.
type Binary struct {
	left     Expr
	operator lexer.Token
	right    Expr
}

func (x Binary) isExpr() {}

// Grouping is a parenthesized sub-expression.
type Grouping struct {
	expression Expr
}

func (x Grouping) isExpr() {}

// Literal holds a constant value; value may be any printable type
// (number, string, bool, or nil).
type Literal struct {
	value interface{}
}

func (x Literal) isExpr() {}

// Unary is a prefix operator application: <operator> right.
type Unary struct {
	operator lexer.Token
	right    Expr
}

func (x Unary) isExpr() {}

114
pj1-go/gen-ast.go Normal file
View file

@@ -0,0 +1,114 @@
//go:build generate
//go:generate go run ./gen-ast.go ./
package main
import (
"bufio"
"fmt"
"io"
"os"
"strings"
"unicode"
"unicode/utf8"
)
var modulePath string
// getModulePath returns the module path declared on the first line of
// ./go.mod. This program is meant to be run from the module root via
// "go generate"; it exits with status 1 if go.mod cannot be opened or
// its first line is not a "module " directive.
func getModulePath() string {
	goModFile, err := os.Open("./go.mod")
	if err != nil {
		fmt.Fprintln(os.Stderr, "Could not open go.mod file, is this command being run in the module root folder?")
		os.Exit(1)
	}
	defer goModFile.Close()

	scanner := bufio.NewScanner(goModFile)
	// Guard both against an empty file (Scan returning false) and a
	// first line shorter than "module " — the previous fstLine[:7]
	// expression would panic with a slice-out-of-range in that case.
	if !scanner.Scan() || !strings.HasPrefix(scanner.Text(), "module ") {
		fmt.Fprintln(os.Stderr, "The first line of go.mod seems malformed")
		os.Exit(1)
	}
	return strings.TrimSpace(strings.TrimPrefix(scanner.Text(), "module "))
}
// main validates the command line, resolves the module path, and
// generates the expression AST definitions into the given directory.
func main() {
	if len(os.Args) != 2 {
		fmt.Fprintln(os.Stderr, "Usage: go run gen-ast.go <output directory>")
		os.Exit(64)
	}
	outputDir := os.Args[1]
	modulePath = getModulePath()

	// Grammar description: "TypeName : field decls" per production.
	grammar := []string{
		"Binary : left Expr, operator lexer.Token, right Expr",
		"Grouping : expression Expr",
		// a literal can be any value that can be printed
		"Literal : value interface{}",
		"Unary : operator lexer.Token, right Expr",
	}
	defineAst(outputDir, "Expr", grammar)
}
// lowerFirst returns s with its first rune converted to lower case;
// the remainder of the string is left untouched.
func lowerFirst(s string) string {
	if len(s) == 0 {
		return s
	}
	first, size := utf8.DecodeRuneInString(s)
	return string(unicode.ToLower(first)) + s[size:]
}
// defineAst writes the generated AST source file (<outputDir>/ast.go)
// containing the base interface named baseName plus one struct per
// entry of types. Each entry has the form
// "TypeName : field1 Type1, field2 Type2".
func defineAst(outputDir string, baseName string, types []string) {
	fileName := "ast"
	var path string = outputDir + "/" + fileName + ".go"
	f, err := os.Create(path)
	if err != nil {
		// Include the underlying error so the cause (missing
		// directory, permissions, ...) is visible.
		fmt.Fprintln(os.Stderr, "Could not open file \""+path+"\": "+err.Error())
		return
	}
	defer f.Close()

	fmt.Fprintln(f, "// Code generated by tools/gen-ast.go DO NOT EDIT.")
	fmt.Fprintln(f, "package main")
	fmt.Fprintln(f)
	fmt.Fprintln(f, "import \""+modulePath+"/lexer\"")
	fmt.Fprintln(f)
	// Creates a dummy interface just to limit types which can be
	// considered an "Expr"
	fmt.Fprintln(f, "type "+baseName+" interface {")
	fmt.Fprintln(f, "\tis"+baseName+"()")
	fmt.Fprintln(f, "}")
	fmt.Fprintln(f)
	// The AST types.
	for _, t := range types {
		// Guard against a malformed entry; the previous blind
		// tSplit[1] access panicked when the colon was missing.
		typeName, fields, ok := strings.Cut(t, ":")
		if !ok {
			fmt.Fprintln(os.Stderr, "Malformed type description: "+t)
			continue
		}
		defineType(f, baseName, strings.TrimSpace(typeName), strings.TrimSpace(fields))
	}
}
func defineType(f io.Writer, baseName string, typeName string, fieldList string) {
fmt.Fprintln(f, "type "+typeName+" struct {")
// Fields.
var fields []string = strings.Split(fieldList, ", ")
for _, field := range fields {
fmt.Fprintln(f, "\t"+field)
}
fmt.Fprintln(f, "}")
fmt.Fprintln(f)
// Interface dummy function
fmt.Fprintln(f, "func (x "+typeName+") is"+baseName+"() {}")
fmt.Fprintln(f)
// TODO: may have to generate constructor, according to top of
// page 110, right now it seems that defining structs without
// the constructor does the job
}

View file

@ -1,3 +1,5 @@
git.bonsai.cool/kayprish/pj1/pj1-go v0.0.0-20240807135935-04e669c15630 h1:SO1oOi4BAVPmoCkBCeZEcNXiCDZJrM49Y5Fhm/bBuBU=
git.bonsai.cool/kayprish/pj1/pj1-go v0.0.0-20240807135935-04e669c15630/go.mod h1:f4dHsvhBf6lTSjuA+gqssxFOHdkOjPnS4QeufMWvHOM=
golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc= golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc=
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f h1:v4INt8xihDGvnrfjMDVXGxw9wrfxYyCjk0KbXjhR55s= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f h1:v4INt8xihDGvnrfjMDVXGxw9wrfxYyCjk0KbXjhR55s=

View file

@ -1,4 +1,4 @@
package main package lexer
import ( import (
"fmt" "fmt"
@ -6,6 +6,8 @@ import (
"strconv" "strconv"
"strings" "strings"
"unicode/utf8" "unicode/utf8"
"git.bonsai.cool/kayprish/pj1/pj1-go/util"
) )
type TokenType int type TokenType int
@ -86,7 +88,7 @@ var keywords = map[string]TokenType{
type Token struct { type Token struct {
ttype TokenType ttype TokenType
lexeme string Lexeme string
literal interface{} literal interface{}
line int line int
} }
@ -97,12 +99,12 @@ func NewToken(ttype TokenType, lexeme string, literal interface{}, line int) Tok
} }
func (t Token) String() string { func (t Token) String() string {
return fmt.Sprintf("%v %v %v", t.ttype, t.lexeme, t.literal) return fmt.Sprintf("%v %v %v", t.ttype, t.Lexeme, t.literal)
} }
type Lexer struct { type Lexer struct {
source string source string
tokens []Token Tokens []Token
startByte int startByte int
currentByte int currentByte int
@ -123,7 +125,7 @@ func (l *Lexer) ScanTokens() {
l.scanToken() l.scanToken()
} }
l.tokens = append(l.tokens, NewToken(EOF, "", nil, l.line)) l.Tokens = append(l.Tokens, NewToken(EOF, "", nil, l.line))
} }
func (l Lexer) atEnd() bool { func (l Lexer) atEnd() bool {
@ -226,7 +228,7 @@ func (l *Lexer) scanToken() {
} else { } else {
// TODO: if there are multiple bad characters // TODO: if there are multiple bad characters
// coalesce similar errors into one // coalesce similar errors into one
error(l.line, fmt.Sprintf("Unexpected character, %v.", c)) util.Error(l.line, fmt.Sprintf("Unexpected character, %v.", c))
} }
} }
} }
@ -351,7 +353,7 @@ func (l *Lexer) str() {
case 'e': case 'e':
c = '\x1b' c = '\x1b'
default: default:
error(l.line, fmt.Sprintf("Invalid escape sequence \\%v.", l.peek())) util.Error(l.line, fmt.Sprintf("Invalid escape sequence \\%v.", l.peek()))
return return
} }
} }
@ -360,7 +362,7 @@ func (l *Lexer) str() {
} }
if l.atEnd() { if l.atEnd() {
error(l.line, "Unterminated string.") util.Error(l.line, "Unterminated string.")
return return
} }
@ -378,5 +380,5 @@ func (l *Lexer) addSimpleToken(ttype TokenType) {
func (l *Lexer) addToken(ttype TokenType, literal interface{}) { func (l *Lexer) addToken(ttype TokenType, literal interface{}) {
text := l.source[l.startByte:l.currentByte] text := l.source[l.startByte:l.currentByte]
l.tokens = append(l.tokens, NewToken(ttype, text, literal, l.line)) l.Tokens = append(l.Tokens, NewToken(ttype, text, literal, l.line))
} }

View file

@ -1,6 +1,6 @@
// Code generated by "stringer -type=TokenType"; DO NOT EDIT. // Code generated by "stringer -type=TokenType"; DO NOT EDIT.
package main package lexer
import "strconv" import "strconv"

View file

@ -5,22 +5,21 @@ import (
"fmt" "fmt"
"io/ioutil" "io/ioutil"
"os" "os"
"git.bonsai.cool/kayprish/pj1/pj1-go/lexer"
"git.bonsai.cool/kayprish/pj1/pj1-go/util"
) )
var ( // func main() {
hadError bool = false // if len(os.Args) > 2 {
) // fmt.Println("Usage: pj1-go [script]")
// os.Exit(64)
func main() { // } else if len(os.Args) == 2 {
if len(os.Args) > 2 { // runFile(os.Args[0])
fmt.Println("Usage: pj1-go [script]") // } else {
os.Exit(64) // runPrompt()
} else if len(os.Args) == 2 { // }
runFile(os.Args[0]) // }
} else {
runPrompt()
}
}
func runFile(path string) { func runFile(path string) {
bytes, err := ioutil.ReadFile(path) bytes, err := ioutil.ReadFile(path)
@ -30,7 +29,7 @@ func runFile(path string) {
} }
run(string(bytes[:])) run(string(bytes[:]))
if hadError { if util.HadError {
os.Exit(65) os.Exit(65)
} }
} }
@ -46,7 +45,7 @@ func runPrompt() {
fmt.Println(line) fmt.Println(line)
run(line) run(line)
hadError = false util.HadError = false
} }
if err := scanner.Err(); err != nil { if err := scanner.Err(); err != nil {
fmt.Fprintln(os.Stderr, "reading standard input:", err) fmt.Fprintln(os.Stderr, "reading standard input:", err)
@ -54,21 +53,11 @@ func runPrompt() {
} }
func run(source string) { func run(source string) {
lexer := NewLexer(source) l := lexer.NewLexer(source)
lexer.ScanTokens() l.ScanTokens()
var tokens []Token = lexer.tokens var tokens []lexer.Token = l.Tokens
for _, token := range tokens { for _, token := range tokens {
fmt.Println(token) fmt.Println(token)
} }
} }
// TODO: might have to rename
func error(line int, msg string) {
report(line, "", msg)
hadError = true
}
func report(line int, where string, msg string) {
fmt.Fprintln(os.Stderr, "[line "+fmt.Sprint(line)+"] Error"+where+": "+msg)
}

19
pj1-go/util/error.go Normal file
View file

@@ -0,0 +1,19 @@
package util
import (
"fmt"
"os"
)
var (
	// HadError records whether any error has been reported via Error;
	// callers reset it per input line and check it to pick an exit
	// status.
	HadError bool = false
)
func Error(line int, msg string) {
report(line, "", msg)
HadError = true
}
// report writes a formatted error message to standard error, e.g.
// "[line 12] Error: Unexpected character.".
func report(line int, where string, msg string) {
	fmt.Fprintf(os.Stderr, "[line %d] Error%s: %s\n", line, where, msg)
}