Compare commits

..

7 commits

Author SHA1 Message Date
Petar Kapriš 6403c4578c Add exponentiation operator '**' in lexer 2025-01-22 19:29:10 +01:00
Petar Kapriš 225a2b13ea Move lexer back into main package with updated go.mod 2025-01-22 19:28:38 +01:00
Petar Kapriš 6ac4a512c2 Add intended grammar for expression syntax 2025-01-22 19:25:47 +01:00
Petar Kapriš 9e1ef93990 Add modulo operators to lexer
Tokens MODULO and SLASH_MODULO are going to represent the modulo and
fmod operations on integers and floats, or return a pair with the
divisor and the modulo, respectively.
2025-01-22 19:25:10 +01:00
Petar Kapriš 4178bfefea Add reverse polish notation printer for AST
This is meant to complete chapter 5 challenge 3.
2025-01-22 19:24:34 +01:00
Petar Kapriš a647c39ec4 Uncomment main function 2025-01-22 19:24:03 +01:00
Petar Kapriš c6bde606d6 Add text challenges for end of chapter 5 2025-01-22 19:23:42 +01:00
10 changed files with 149 additions and 56 deletions

10
expr_grammar Normal file
View file

@ -0,0 +1,10 @@
expression -> equality ;
equality -> comparison ( ( "!=" | "==" ) comparison )* ;
comparison -> term ( ( ">" | ">=" | "<" | "<=" ) term )* ;
term -> factor ( ( "-" | "+" ) factor )* ;
factor -> exponent ( ( "/" | "*" | "/." | "/_" | "%" | "/%" ) exponent )* ;
exponent -> unary ( ( "**" ) unary )* ;
unary -> ( "!" | "-" ) unary
| primary ;
primary -> NUMBER | STRING | "true" | "false" | "nil"
| "(" expression ")" ;

22
pj1-go/ast-rpn.go Normal file
View file

@ -0,0 +1,22 @@
package main
import (
"fmt"
)
// rpn renders an AST expression in reverse Polish notation:
// operands are emitted first, followed by their operator.
// Unknown node types fall through to an error string.
func rpn(expr Expr) string {
	switch node := expr.(type) {
	case Binary:
		return fmt.Sprintf("%s %s %s", rpn(node.left), rpn(node.right), node.operator.Lexeme)
	case Grouping:
		// Parentheses carry no RPN output; the " group" suffix is deliberately omitted.
		return rpn(node.expression)
	case Literal:
		if node.value == nil {
			return "nil"
		}
		return fmt.Sprintf("%v", node.value)
	case Unary:
		return fmt.Sprintf("%s %v", rpn(node.right), node.operator.Lexeme)
	}
	return "ERROR: reached impossible branch in print function"
}

View file

@ -1,15 +1,13 @@
// Code generated by tools/gen-ast.go DO NOT EDIT.
package main
import "git.bonsai.cool/kayprish/pj1/pj1-go/lexer"
type Expr interface {
isExpr()
}
type Binary struct {
left Expr
operator lexer.Token
operator Token
right Expr
}
@ -28,7 +26,7 @@ type Literal struct {
func (x Literal) isExpr() {}
type Unary struct {
operator lexer.Token
operator Token
right Expr
}

View file

@ -14,10 +14,11 @@ import (
"unicode/utf8"
)
var modulePath string
// returns a string which contains the current module path, keep in mind this
// program is meant to be run in the root of the module using go generate
//
// This function is no longer useful because the lexer package has been
// deleted, however, it may become useful again soon
func getModulePath() string {
goModFile, err := os.Open("./go.mod")
if err != nil {
@ -44,13 +45,12 @@ func main() {
os.Exit(64)
}
outputDir := os.Args[1]
modulePath = getModulePath()
defineAst(outputDir, "Expr",
[]string{"Binary : left Expr, operator lexer.Token, right Expr",
[]string{"Binary : left Expr, operator Token, right Expr",
"Grouping : expression Expr",
"Literal : value interface{}",
// a literal can be any value that can be printed
"Unary : operator lexer.Token, right Expr"})
"Unary : operator Token, right Expr"})
}
func lowerFirst(s string) string {
@ -74,8 +74,6 @@ func defineAst(outputDir string, baseName string, types []string) {
fmt.Fprintln(f, "// Code generated by tools/gen-ast.go DO NOT EDIT.")
fmt.Fprintln(f, "package main")
fmt.Fprintln(f)
fmt.Fprintln(f, "import \""+modulePath+"/lexer\"")
fmt.Fprintln(f)
// Creates a dummy interface just to limit types which can be
// considered an "Expr"
fmt.Fprintln(f, "type "+baseName+" interface {")

View file

@ -1,10 +1,13 @@
module git.bonsai.cool/kayprish/pj1/pj1-go
go 1.18
go 1.22.0
require golang.org/x/tools v0.1.13-0.20220917004541-4d18923f060e
toolchain go1.22.2
require golang.org/x/tools v0.29.0
require (
golang.org/x/mod v0.12.0 // indirect
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f // indirect
golang.org/x/mod v0.22.0 // indirect
golang.org/x/sync v0.10.0 // indirect
golang.org/x/sys v0.29.0 // indirect
)

View file

@ -1,8 +1,14 @@
git.bonsai.cool/kayprish/pj1/pj1-go v0.0.0-20240807135935-04e669c15630 h1:SO1oOi4BAVPmoCkBCeZEcNXiCDZJrM49Y5Fhm/bBuBU=
git.bonsai.cool/kayprish/pj1/pj1-go v0.0.0-20240807135935-04e669c15630/go.mod h1:f4dHsvhBf6lTSjuA+gqssxFOHdkOjPnS4QeufMWvHOM=
golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc=
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4=
golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY=
golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f h1:v4INt8xihDGvnrfjMDVXGxw9wrfxYyCjk0KbXjhR55s=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/tools v0.1.13-0.20220917004541-4d18923f060e h1:K/LreqAwv7hZaSPyj5LvaiQd2wROouJDabf2r+oBqUw=
golang.org/x/tools v0.1.13-0.20220917004541-4d18923f060e/go.mod h1:VsjNM1dMo+Ofkp5d7y7fOdQZD8MTXSQ4w3EPk65AvKU=
golang.org/x/tools v0.29.0 h1:Xx0h3TtM9rzQpQuR4dKLrdglAmCEN5Oi+P74JdhdzXE=
golang.org/x/tools v0.29.0/go.mod h1:KMQVMRsVxU6nHCFXrBPhDB8XncLNLM0lIy/F14RP588=

View file

@ -1,4 +1,4 @@
package lexer
package main
import (
"fmt"
@ -23,9 +23,10 @@ const (
MINUS
PLUS
SEMICOLON
STAR
// One or two character tokens.
STAR
STAR_STAR
BANG
BANG_EQUAL
EQUAL
@ -37,6 +38,8 @@ const (
SLASH
SLASH_DOT
SLASH_UNDERSCORE
SLASH_MODULO
MODULO
// Literals
IDENTIFIER
@ -154,6 +157,11 @@ func (l *Lexer) scanToken() {
case ';':
l.addSimpleToken(SEMICOLON)
case '*':
if l.match('*') {
l.addSimpleToken(STAR_STAR)
} else {
l.addSimpleToken(STAR)
}
l.addSimpleToken(STAR)
case '!':
if l.match('=') {
@ -179,6 +187,8 @@ func (l *Lexer) scanToken() {
} else {
l.addSimpleToken(GREATER)
}
case '%':
l.addSimpleToken(MODULO)
case '/':
if l.match('/') {
// A comment goes until the end of the line
@ -211,6 +221,8 @@ func (l *Lexer) scanToken() {
l.addSimpleToken(SLASH_DOT)
} else if l.match('_') {
l.addSimpleToken(SLASH_UNDERSCORE)
} else if l.match('%') {
l.addSimpleToken(SLASH_MODULO)
} else {
l.addSimpleToken(SLASH)
}

View file

@ -6,20 +6,19 @@ import (
"io/ioutil"
"os"
"git.bonsai.cool/kayprish/pj1/pj1-go/lexer"
"git.bonsai.cool/kayprish/pj1/pj1-go/util"
)
// func main() {
// if len(os.Args) > 2 {
// fmt.Println("Usage: pj1-go [script]")
// os.Exit(64)
// } else if len(os.Args) == 2 {
// runFile(os.Args[0])
// } else {
// runPrompt()
// }
// }
func main() {
if len(os.Args) > 2 {
fmt.Println("Usage: pj1-go [script]")
os.Exit(64)
} else if len(os.Args) == 2 {
runFile(os.Args[0])
} else {
runPrompt()
}
}
func runFile(path string) {
bytes, err := ioutil.ReadFile(path)
@ -53,9 +52,9 @@ func runPrompt() {
}
func run(source string) {
l := lexer.NewLexer(source)
l := NewLexer(source)
l.ScanTokens()
var tokens []lexer.Token = l.Tokens
var tokens []Token = l.Tokens
for _, token := range tokens {
fmt.Println(token)

View file

@ -1,6 +1,6 @@
// Code generated by "stringer -type=TokenType"; DO NOT EDIT.
package lexer
package main
import "strconv"
@ -29,32 +29,34 @@ func _() {
_ = x[SLASH-18]
_ = x[SLASH_DOT-19]
_ = x[SLASH_UNDERSCORE-20]
_ = x[IDENTIFIER-21]
_ = x[STRING-22]
_ = x[INTEGER-23]
_ = x[FLOAT-24]
_ = x[AND-25]
_ = x[CLASS-26]
_ = x[ELSE-27]
_ = x[FALSE-28]
_ = x[FOR-29]
_ = x[FUN-30]
_ = x[IF-31]
_ = x[NIL-32]
_ = x[OR-33]
_ = x[PRINT-34]
_ = x[RETURN-35]
_ = x[SUPER-36]
_ = x[THIS-37]
_ = x[TRUE-38]
_ = x[VAR-39]
_ = x[WHILE-40]
_ = x[EOF-41]
_ = x[SLASH_MODULO-21]
_ = x[MODULO-22]
_ = x[IDENTIFIER-23]
_ = x[STRING-24]
_ = x[INTEGER-25]
_ = x[FLOAT-26]
_ = x[AND-27]
_ = x[CLASS-28]
_ = x[ELSE-29]
_ = x[FALSE-30]
_ = x[FOR-31]
_ = x[FUN-32]
_ = x[IF-33]
_ = x[NIL-34]
_ = x[OR-35]
_ = x[PRINT-36]
_ = x[RETURN-37]
_ = x[SUPER-38]
_ = x[THIS-39]
_ = x[TRUE-40]
_ = x[VAR-41]
_ = x[WHILE-42]
_ = x[EOF-43]
}
const _TokenType_name = "LEFT_PARENRIGHT_PARENLEFT_BRACERIGHT_BRACECOMMADOTMINUSPLUSSEMICOLONSTARBANGBANG_EQUALEQUALEQUAL_EQUALGREATERGREATER_EQUALLESSLESS_EQUALSLASHSLASH_DOTSLASH_UNDERSCOREIDENTIFIERSTRINGINTEGERFLOATANDCLASSELSEFALSEFORFUNIFNILORPRINTRETURNSUPERTHISTRUEVARWHILEEOF"
const _TokenType_name = "LEFT_PARENRIGHT_PARENLEFT_BRACERIGHT_BRACECOMMADOTMINUSPLUSSEMICOLONSTARBANGBANG_EQUALEQUALEQUAL_EQUALGREATERGREATER_EQUALLESSLESS_EQUALSLASHSLASH_DOTSLASH_UNDERSCORESLASH_MODULOMODULOIDENTIFIERSTRINGINTEGERFLOATANDCLASSELSEFALSEFORFUNIFNILORPRINTRETURNSUPERTHISTRUEVARWHILEEOF"
var _TokenType_index = [...]uint16{0, 10, 21, 31, 42, 47, 50, 55, 59, 68, 72, 76, 86, 91, 102, 109, 122, 126, 136, 141, 150, 166, 176, 182, 189, 194, 197, 202, 206, 211, 214, 217, 219, 222, 224, 229, 235, 240, 244, 248, 251, 256, 259}
var _TokenType_index = [...]uint16{0, 10, 21, 31, 42, 47, 50, 55, 59, 68, 72, 76, 86, 91, 102, 109, 122, 126, 136, 141, 150, 166, 178, 184, 194, 200, 207, 212, 215, 220, 224, 229, 232, 235, 237, 240, 242, 247, 253, 258, 262, 266, 269, 274, 277}
func (i TokenType) String() string {
if i < 0 || i >= TokenType(len(_TokenType_index)-1) {

43
text-challenges Normal file
View file

@ -0,0 +1,43 @@
Chapter 5, challenge 1:
Earlier, I said that the | , * , and + forms we added to our grammar metasyntax
were just syntactic sugar. Take this grammar:
expr → expr ( "(" ( expr ( "," expr )* )? ")" | "." IDENTIFIER )+
| IDENTIFIER
| NUMBER
Produce a grammar that matches the same language but does not use any of the notational sugar.
solution:
expr -> IDENTIFIER
expr -> NUMBER
expr -> expr callOrIndex
callOrIndex -> "." IDENTIFIER
callOrIndex -> "(" call ")"
call ->
call -> expr
call -> expr callListEnd
callListEnd ->
callListEnd -> "," expr callListEnd
The grammar here clearly describes a number, or an identifier to a number, or a
function call, or a structure access
Chapter 5, challenge 2:
The Visitor pattern lets you emulate the functional style in an object-oriented
language. Devise a complementary pattern for a functional language. It should let
you bundle all of the operations on one type together and let you define new types
easily.
(SML or Haskell would be ideal for this exercise, but Scheme or another Lisp works
as well.)
solution:
Here, we're supposed to be able to define a new type that "inherits" an
existing one and bundle all the function implementations on that type together.
We could simply write the functions by specific type in one place, since Haskell is just that flexible, but that doesn't guarantee we wrote the needed functions.
Another thing we could do is to put all necessary functions in a single structure, and define a type for this structure, for example if our supertype is Expr, with functions like "printInStr", "execute" and "optimize", our function bundle would look like:
data ExprFuncs a = ExprFuncs (a -> String) (a -> IO ()) (a -> Expr)