Compare commits
No commits in common. "6403c4578c4f5c79f1f292d831d161b8fc4692cc" and "475b15522813a979b256fa8081082175d345c7d8" have entirely different histories.
6403c4578c...475b155228
expr_grammar
@@ -1,10 +0,0 @@
-expression -> equality ;
-equality -> comparison ( ( "!=" | "==" ) comparison )* ;
-comparison -> term ( ( ">" | ">=" | "<" | "<=" ) term )* ;
-term -> factor ( ( "-" | "+" ) factor )* ;
-factor -> exponent ( ( "/" | "*" | "/." | "/_" | "%" | "/%" ) exponent )* ;
-exponent -> unary ( ( "**" ) unary )* ;
-unary -> ( "!" | "-" ) unary
-         | primary ;
-primary -> NUMBER | STRING | "true" | "false" | "nil"
-           | "(" expression ")" ;
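
The removed grammar is layered so that each rule only refers to rules of higher precedence, which is exactly the shape a recursive-descent parser consumes. As a hypothetical, self-contained sketch (none of these names come from the repository), here is how a rule of the form term -> factor ( ( "-" | "+" ) factor )* turns into a parsing function: parse the higher-precedence rule once, then loop while the lookahead is one of the rule's operators, which makes those operators left-associative.

package main

import "fmt"

// node is a minimal stand-in for the project's Expr/Binary types.
type node struct {
	op          string
	left, right *node
	value       string
}

type parser struct {
	tokens []string
	pos    int
}

func (p *parser) peek() string {
	if p.pos < len(p.tokens) {
		return p.tokens[p.pos]
	}
	return ""
}

func (p *parser) next() string {
	t := p.peek()
	p.pos++
	return t
}

// primary stands in for the rest of the precedence chain
// (factor, exponent, unary, primary in the grammar above).
func (p *parser) primary() *node {
	return &node{value: p.next()}
}

// term -> factor ( ( "-" | "+" ) factor )*
// The ( ... )* repetition becomes a loop; each iteration wraps the
// expression parsed so far as the left operand, so "1 + 2 - 3" groups
// as ((1 + 2) - 3).
func (p *parser) term() *node {
	left := p.primary()
	for p.peek() == "+" || p.peek() == "-" {
		op := p.next()
		right := p.primary()
		left = &node{op: op, left: left, right: right}
	}
	return left
}

func render(n *node) string {
	if n.op == "" {
		return n.value
	}
	return "(" + render(n.left) + " " + n.op + " " + render(n.right) + ")"
}

func main() {
	p := &parser{tokens: []string{"1", "+", "2", "-", "3"}}
	fmt.Println(render(p.term())) // prints ((1 + 2) - 3)
}
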
@@ -1,22 +0,0 @@
-package main
-
-import (
-	"fmt"
-)
-
-func rpn(expr Expr) string {
-	switch e := expr.(type) {
-	case Binary:
-		return rpn(e.left) + " " + rpn(e.right) + " " + e.operator.Lexeme
-	case Grouping:
-		return rpn(e.expression) // + " group" arguable if this is even wanted
-	case Literal:
-		if e.value == nil {
-			return "nil"
-		}
-		return fmt.Sprintf("%v", e.value)
-	case Unary:
-		return rpn(e.right) + fmt.Sprintf(" %v", e.operator.Lexeme)
-	}
-	return "ERROR: reached impossible branch in print function"
-}
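
The deleted rpn printer walks the AST with a type switch and emits operands before operators. A self-contained sketch of the same idea (simplified stand-in types, not the project's actual Expr and lexer.Token declarations) makes the output concrete:

package main

import "fmt"

type expr interface{ isExpr() }

// binary and literal are simplified stand-ins for the generated Binary and
// Literal structs; op stands in for operator.Lexeme.
type binary struct {
	left  expr
	op    string
	right expr
}

type literal struct{ value interface{} }

func (binary) isExpr()  {}
func (literal) isExpr() {}

func rpn(e expr) string {
	switch e := e.(type) {
	case binary:
		return rpn(e.left) + " " + rpn(e.right) + " " + e.op
	case literal:
		return fmt.Sprintf("%v", e.value)
	}
	return ""
}

func main() {
	// (1 + 2) * 3
	e := binary{left: binary{left: literal{1}, op: "+", right: literal{2}}, op: "*", right: literal{3}}
	fmt.Println(rpn(e)) // prints 1 2 + 3 *
}
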
@@ -1,13 +1,15 @@
 // Code generated by tools/gen-ast.go DO NOT EDIT.
 package main
 
+import "git.bonsai.cool/kayprish/pj1/pj1-go/lexer"
+
 type Expr interface {
 	isExpr()
 }
 
 type Binary struct {
 	left     Expr
-	operator Token
+	operator lexer.Token
 	right    Expr
 }
 
@@ -26,7 +28,7 @@ type Literal struct {
 func (x Literal) isExpr() {}
 
 type Unary struct {
-	operator Token
+	operator lexer.Token
 	right    Expr
 }
 
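
The unexported isExpr method in the generated file is a marker: only types declared in the same package can implement it, so the compiler rejects anything else where an Expr is expected. A hypothetical stand-alone sketch (names not taken from the repository) of the same trick:

package main

import "fmt"

type Expr interface {
	isExpr()
}

type Literal struct{ value interface{} }

// Declaring the unexported marker method is what admits Literal as an Expr.
func (Literal) isExpr() {}

func describe(e Expr) string {
	return fmt.Sprintf("%T", e)
}

func main() {
	fmt.Println(describe(Literal{value: 42})) // prints main.Literal
	// fmt.Println(describe("not an expression")) // would not compile:
	// string does not implement Expr (missing method isExpr)
}
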
tools/gen-ast.go
@@ -14,11 +14,10 @@ import (
 	"unicode/utf8"
 )
 
+var modulePath string
+
 // returns a string which contains the current module path, keep in mind this
 // program is meant to be run in the root of the module using go generate
-//
-// This function is no longer useful because the lexer package has been
-// deleted, however, it may become useful again soon
 func getModulePath() string {
 	goModFile, err := os.Open("./go.mod")
 	if err != nil {
@@ -45,12 +44,13 @@ func main() {
 		os.Exit(64)
 	}
 	outputDir := os.Args[1]
+	modulePath = getModulePath()
 	defineAst(outputDir, "Expr",
-		[]string{"Binary : left Expr, operator Token, right Expr",
+		[]string{"Binary : left Expr, operator lexer.Token, right Expr",
 			"Grouping : expression Expr",
 			"Literal : value interface{}",
 			// a literal can be any value that can be printed
-			"Unary : operator Token, right Expr"})
+			"Unary : operator lexer.Token, right Expr"})
 }
 
 func lowerFirst(s string) string {
@@ -74,6 +74,8 @@ func defineAst(outputDir string, baseName string, types []string) {
 	fmt.Fprintln(f, "// Code generated by tools/gen-ast.go DO NOT EDIT.")
 	fmt.Fprintln(f, "package main")
 	fmt.Fprintln(f)
+	fmt.Fprintln(f, "import \""+modulePath+"/lexer\"")
+	fmt.Fprintln(f)
 	// Creates a dummy interface just to limit types which can be
 	// considered an "Expr"
 	fmt.Fprintln(f, "type "+baseName+" interface {")
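
getModulePath is only partially visible in this compare (it opens ./go.mod), but its job is to recover the module path so the generator can emit the import line above. A hypothetical, self-contained sketch of one way to do that, assuming the path is simply the argument of the go.mod line that starts with "module" (the repository's version returns a bare string; the error handling here is illustrative):

package main

import (
	"bufio"
	"fmt"
	"os"
	"strings"
)

// getModulePath scans go.mod for the "module" directive and returns its
// argument, e.g. "git.bonsai.cool/kayprish/pj1/pj1-go".
func getModulePath() (string, error) {
	goModFile, err := os.Open("./go.mod")
	if err != nil {
		return "", err
	}
	defer goModFile.Close()

	scanner := bufio.NewScanner(goModFile)
	for scanner.Scan() {
		line := strings.TrimSpace(scanner.Text())
		if strings.HasPrefix(line, "module ") {
			return strings.TrimSpace(strings.TrimPrefix(line, "module ")), nil
		}
	}
	return "", fmt.Errorf("no module directive found in go.mod")
}

func main() {
	path, err := getModulePath()
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println(path)
}
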
go.mod
@@ -1,13 +1,10 @@
 module git.bonsai.cool/kayprish/pj1/pj1-go
 
-go 1.22.0
+go 1.18
 
-toolchain go1.22.2
-
-require golang.org/x/tools v0.29.0
+require golang.org/x/tools v0.1.13-0.20220917004541-4d18923f060e
 
 require (
-	golang.org/x/mod v0.22.0 // indirect
-	golang.org/x/sync v0.10.0 // indirect
-	golang.org/x/sys v0.29.0 // indirect
+	golang.org/x/mod v0.12.0 // indirect
+	golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f // indirect
 )
go.sum
@@ -1,14 +1,8 @@
+git.bonsai.cool/kayprish/pj1/pj1-go v0.0.0-20240807135935-04e669c15630 h1:SO1oOi4BAVPmoCkBCeZEcNXiCDZJrM49Y5Fhm/bBuBU=
+git.bonsai.cool/kayprish/pj1/pj1-go v0.0.0-20240807135935-04e669c15630/go.mod h1:f4dHsvhBf6lTSjuA+gqssxFOHdkOjPnS4QeufMWvHOM=
 golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc=
 golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
-golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4=
-golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY=
-golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
-golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
 golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f h1:v4INt8xihDGvnrfjMDVXGxw9wrfxYyCjk0KbXjhR55s=
 golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
-golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
 golang.org/x/tools v0.1.13-0.20220917004541-4d18923f060e h1:K/LreqAwv7hZaSPyj5LvaiQd2wROouJDabf2r+oBqUw=
 golang.org/x/tools v0.1.13-0.20220917004541-4d18923f060e/go.mod h1:VsjNM1dMo+Ofkp5d7y7fOdQZD8MTXSQ4w3EPk65AvKU=
-golang.org/x/tools v0.29.0 h1:Xx0h3TtM9rzQpQuR4dKLrdglAmCEN5Oi+P74JdhdzXE=
-golang.org/x/tools v0.29.0/go.mod h1:KMQVMRsVxU6nHCFXrBPhDB8XncLNLM0lIy/F14RP588=
@@ -1,4 +1,4 @@
-package main
+package lexer
 
 import (
 	"fmt"
@@ -23,10 +23,9 @@ const (
 	MINUS
 	PLUS
 	SEMICOLON
+	STAR
 
 	// One or two character tokens.
-	STAR
-	STAR_STAR
 	BANG
 	BANG_EQUAL
 	EQUAL
@@ -38,8 +37,6 @@ const (
 	SLASH
 	SLASH_DOT
 	SLASH_UNDERSCORE
-	SLASH_MODULO
-	MODULO
 
 	// Literals
 	IDENTIFIER
@@ -157,11 +154,6 @@ func (l *Lexer) scanToken() {
 	case ';':
 		l.addSimpleToken(SEMICOLON)
 	case '*':
-		if l.match('*') {
-			l.addSimpleToken(STAR_STAR)
-		} else {
-			l.addSimpleToken(STAR)
-		}
 		l.addSimpleToken(STAR)
 	case '!':
 		if l.match('=') {
@@ -187,8 +179,6 @@ func (l *Lexer) scanToken() {
 		} else {
 			l.addSimpleToken(GREATER)
 		}
-	case '%':
-		l.addSimpleToken(MODULO)
 	case '/':
 		if l.match('/') {
 			// A comment goes until the end of the line
@@ -221,8 +211,6 @@ func (l *Lexer) scanToken() {
 			l.addSimpleToken(SLASH_DOT)
 		} else if l.match('_') {
 			l.addSimpleToken(SLASH_UNDERSCORE)
-		} else if l.match('%') {
-			l.addSimpleToken(SLASH_MODULO)
 		} else {
 			l.addSimpleToken(SLASH)
 		}
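
Both versions of scanToken distinguish one- and two-character tokens the same way: after the first character is consumed, match peeks at the next one and consumes it only if it is the expected character, so "/" alone becomes SLASH while "/." and "/_" (and, in the removed branches, "/%" and "**") get their own token types. The Lexer's own helpers are not shown in this compare; this stand-alone sketch with hypothetical names mirrors the pattern:

package main

import "fmt"

type miniLexer struct {
	source  string
	current int
}

// match consumes the next character only if it equals expected.
func (l *miniLexer) match(expected byte) bool {
	if l.current >= len(l.source) || l.source[l.current] != expected {
		return false
	}
	l.current++
	return true
}

// scanSlash assumes the '/' itself has already been consumed, like the
// case '/' branch in scanToken.
func (l *miniLexer) scanSlash() string {
	switch {
	case l.match('.'):
		return "SLASH_DOT"
	case l.match('_'):
		return "SLASH_UNDERSCORE"
	default:
		return "SLASH"
	}
}

func main() {
	for _, src := range []string{"/.", "/_", "/"} {
		l := &miniLexer{source: src, current: 1} // position 1: '/' already consumed
		fmt.Println(src, "->", l.scanSlash())
	}
}
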
@@ -1,6 +1,6 @@
 // Code generated by "stringer -type=TokenType"; DO NOT EDIT.
 
-package main
+package lexer
 
 import "strconv"
 
@@ -29,34 +29,32 @@ func _() {
 	_ = x[SLASH-18]
 	_ = x[SLASH_DOT-19]
 	_ = x[SLASH_UNDERSCORE-20]
-	_ = x[SLASH_MODULO-21]
-	_ = x[MODULO-22]
-	_ = x[IDENTIFIER-23]
-	_ = x[STRING-24]
-	_ = x[INTEGER-25]
-	_ = x[FLOAT-26]
-	_ = x[AND-27]
-	_ = x[CLASS-28]
-	_ = x[ELSE-29]
-	_ = x[FALSE-30]
-	_ = x[FOR-31]
-	_ = x[FUN-32]
-	_ = x[IF-33]
-	_ = x[NIL-34]
-	_ = x[OR-35]
-	_ = x[PRINT-36]
-	_ = x[RETURN-37]
-	_ = x[SUPER-38]
-	_ = x[THIS-39]
-	_ = x[TRUE-40]
-	_ = x[VAR-41]
-	_ = x[WHILE-42]
-	_ = x[EOF-43]
+	_ = x[IDENTIFIER-21]
+	_ = x[STRING-22]
+	_ = x[INTEGER-23]
+	_ = x[FLOAT-24]
+	_ = x[AND-25]
+	_ = x[CLASS-26]
+	_ = x[ELSE-27]
+	_ = x[FALSE-28]
+	_ = x[FOR-29]
+	_ = x[FUN-30]
+	_ = x[IF-31]
+	_ = x[NIL-32]
+	_ = x[OR-33]
+	_ = x[PRINT-34]
+	_ = x[RETURN-35]
+	_ = x[SUPER-36]
+	_ = x[THIS-37]
+	_ = x[TRUE-38]
+	_ = x[VAR-39]
+	_ = x[WHILE-40]
+	_ = x[EOF-41]
 }
 
-const _TokenType_name = "LEFT_PARENRIGHT_PARENLEFT_BRACERIGHT_BRACECOMMADOTMINUSPLUSSEMICOLONSTARBANGBANG_EQUALEQUALEQUAL_EQUALGREATERGREATER_EQUALLESSLESS_EQUALSLASHSLASH_DOTSLASH_UNDERSCORESLASH_MODULOMODULOIDENTIFIERSTRINGINTEGERFLOATANDCLASSELSEFALSEFORFUNIFNILORPRINTRETURNSUPERTHISTRUEVARWHILEEOF"
+const _TokenType_name = "LEFT_PARENRIGHT_PARENLEFT_BRACERIGHT_BRACECOMMADOTMINUSPLUSSEMICOLONSTARBANGBANG_EQUALEQUALEQUAL_EQUALGREATERGREATER_EQUALLESSLESS_EQUALSLASHSLASH_DOTSLASH_UNDERSCOREIDENTIFIERSTRINGINTEGERFLOATANDCLASSELSEFALSEFORFUNIFNILORPRINTRETURNSUPERTHISTRUEVARWHILEEOF"
 
-var _TokenType_index = [...]uint16{0, 10, 21, 31, 42, 47, 50, 55, 59, 68, 72, 76, 86, 91, 102, 109, 122, 126, 136, 141, 150, 166, 178, 184, 194, 200, 207, 212, 215, 220, 224, 229, 232, 235, 237, 240, 242, 247, 253, 258, 262, 266, 269, 274, 277}
+var _TokenType_index = [...]uint16{0, 10, 21, 31, 42, 47, 50, 55, 59, 68, 72, 76, 86, 91, 102, 109, 122, 126, 136, 141, 150, 166, 176, 182, 189, 194, 197, 202, 206, 211, 214, 217, 219, 222, 224, 229, 235, 240, 244, 248, 251, 256, 259}
 
 func (i TokenType) String() string {
 	if i < 0 || i >= TokenType(len(_TokenType_index)-1) {
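
The regenerated table works the way stringer output always does: every token name is concatenated into one string and _TokenType_index records where each name starts, so String() just slices _TokenType_name between two neighbouring offsets. A tiny self-contained analogue (three made-up tokens, not the real 42-entry table):

package main

import (
	"fmt"
	"strconv"
)

type TokenType int

const (
	LEFT_PAREN TokenType = iota
	RIGHT_PAREN
	COMMA
)

// All names back to back, plus the offset where each one begins and ends.
const _TokenType_name = "LEFT_PARENRIGHT_PARENCOMMA"

var _TokenType_index = [...]uint16{0, 10, 21, 26}

func (i TokenType) String() string {
	if i < 0 || i >= TokenType(len(_TokenType_index)-1) {
		return "TokenType(" + strconv.FormatInt(int64(i), 10) + ")"
	}
	return _TokenType_name[_TokenType_index[i]:_TokenType_index[i+1]]
}

func main() {
	fmt.Println(LEFT_PAREN, COMMA, TokenType(99)) // LEFT_PAREN COMMA TokenType(99)
}
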
@@ -6,19 +6,20 @@ import (
 	"io/ioutil"
 	"os"
 
+	"git.bonsai.cool/kayprish/pj1/pj1-go/lexer"
 	"git.bonsai.cool/kayprish/pj1/pj1-go/util"
 )
 
-func main() {
-	if len(os.Args) > 2 {
-		fmt.Println("Usage: pj1-go [script]")
-		os.Exit(64)
-	} else if len(os.Args) == 2 {
-		runFile(os.Args[0])
-	} else {
-		runPrompt()
-	}
-}
+// func main() {
+// 	if len(os.Args) > 2 {
+// 		fmt.Println("Usage: pj1-go [script]")
+// 		os.Exit(64)
+// 	} else if len(os.Args) == 2 {
+// 		runFile(os.Args[0])
+// 	} else {
+// 		runPrompt()
+// 	}
+// }
 
 func runFile(path string) {
 	bytes, err := ioutil.ReadFile(path)
@@ -52,9 +53,9 @@ func runPrompt() {
 }
 
 func run(source string) {
-	l := NewLexer(source)
+	l := lexer.NewLexer(source)
 	l.ScanTokens()
-	var tokens []Token = l.Tokens
+	var tokens []lexer.Token = l.Tokens
 
 	for _, token := range tokens {
 		fmt.Println(token)
@@ -1,43 +0,0 @@
-Chapter 5, challenge 1:
-Earlier, I said that the | , * , and + forms we added to our grammar metasyntax
-were just syntactic sugar. Take this grammar:
-
-expr → expr ( "(" ( expr ( "," expr )* )? ")" | "." IDENTIFIER )+
-     | IDENTIFIER
-     | NUMBER
-
-Produce a grammar that matches the same language but does not use any of the notational sugar.
-
-solution:
-expr -> IDENTIFIER
-expr -> NUMBER
-expr -> expr callOrIndex
-
-callOrIndex -> "." IDENTIFIER
-callOrIndex -> "(" call ")"
-
-call ->
-call -> expr
-call -> expr callListEnd
-
-callListEnd ->
-callListEnd -> "," expr callListEnd
-
-
-The grammar here clearly describes a number, or an identifier, or a
-function call, or a structure access
-
-Chapter 5, challenge 2:
-The Visitor pattern lets you emulate the functional style in an object-oriented
-language. Devise a complementary pattern for a functional language. It should let
-you bundle all of the operations on one type together and let you define new types
-easily.
-(SML or Haskell would be ideal for this exercise, but Scheme or another Lisp works
-as well.)
-
-solution:
-Here, we're supposed to be able to define a new type that "inherits" an
-existing one and bundle all the function implementations on that type together.
-We could simply write the functions by specific type in one place, since Haskell is just that flexible, but that doesn't guarantee we wrote the needed functions.
-Another thing we could do is to put all necessary functions in a single structure, and define a type for this structure, for example if our supertype is Expr, with functions like "printInStr", "execute" and "optimize", our function bundle would look like:
-data ExprFuncs a = ExprFuncs (a -> String) (a -> IO ()) (a -> Expr)
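
The removed note sketches the usual functional counterpart of the Visitor pattern: instead of attaching methods to each type, all operations for a type are collected in one record of functions. A hypothetical Go rendering of the same idea (illustrative names only, not code from this repository):

package main

import "fmt"

type Expr struct{ text string }

// ExprFuncs bundles every operation a representation must support, mirroring
// the note's data ExprFuncs a = ExprFuncs (a -> String) (a -> IO ()) (a -> Expr).
type ExprFuncs[A any] struct {
	PrintInStr func(A) string
	Execute    func(A)
	Optimize   func(A) Expr
}

// numLit is one concrete representation; all of its operations live together
// in a single bundle value.
type numLit int

var numLitFuncs = ExprFuncs[numLit]{
	PrintInStr: func(n numLit) string { return fmt.Sprintf("%d", n) },
	Execute:    func(n numLit) { fmt.Println("evaluated", n) },
	Optimize:   func(n numLit) Expr { return Expr{text: fmt.Sprintf("%d", n)} },
}

func main() {
	fmt.Println(numLitFuncs.PrintInStr(42)) // prints 42
	numLitFuncs.Execute(7)                  // prints evaluated 7
}
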