Skip to content

Commit 344a14e

Browse files
committed
added parseLetStatement
1 parent d6ddc1a commit 344a14e

File tree

10 files changed

+215
-105
lines changed

10 files changed

+215
-105
lines changed

.README.md.swp

12 KB
Binary file not shown.

.github/workflows/test.yml

+18
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
# CI workflow: run the Go test suite on every push and pull request,
# across a matrix of Go versions and operating systems.
on: [push, pull_request]
name: Test
jobs:
  test:
    strategy:
      matrix:
        go-version: [1.13.x, 1.12.x]
        platform: [ubuntu-latest, macos-latest]
    runs-on: ${{ matrix.platform }}
    steps:
      - name: Install Go
        uses: actions/setup-go@v1
        with:
          go-version: ${{ matrix.go-version }}
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Test
        run: go test ./...

ast/ast.go

+15-16
Original file line numberDiff line numberDiff line change
@@ -2,46 +2,45 @@ package ast
22

33
import "github.com/uchiiii/gibbon/token"
44

5-
// Node is the base interface implemented by every AST node. It exposes
// the literal of the token the node is associated with, used mainly in
// tests and debugging output.
type Node interface {
	TokenLiteral() string
}

// Statement is implemented by AST nodes that appear in statement
// position (e.g. let statements). statementNode is a marker method
// only; it carries no behavior.
type Statement interface {
	Node
	statementNode()
}

// Expression is implemented by AST nodes that produce a value.
// expressionNode is a marker method only; it carries no behavior.
type Expression interface {
	Node
	expressionNode()
}
1818

19-
type Program struct{
19+
type Program struct {
2020
Statements []Statement
2121
}
2222

23-
func (p* Program) TokenLiteral() string{
24-
if len(p.Statements) > 0{
23+
func (p *Program) TokenLiteral() string {
24+
if len(p.Statements) > 0 {
2525
return p.Statements[0].TokenLiteral()
26-
}else{
26+
} else {
2727
return ""
2828
}
2929
}
3030

31-
// LetStatement represents a `let <name> = <value>;` statement.
type LetStatement struct {
	Token token.Token // the token.LET token
	Name  *Identifier // the bound identifier (left-hand side)
	Value Expression  // the bound expression (right-hand side)
}

// statementNode marks LetStatement as a Statement.
func (ls *LetStatement) statementNode() {}

// TokenLiteral returns the literal of the statement's leading token ("let").
func (ls *LetStatement) TokenLiteral() string { return ls.Token.Literal }
3939

40-
// Identifier represents a name such as the x in `let x = 5;`. It is
// modelled as an Expression because identifiers also occur in value
// position (e.g. `let y = x;`).
type Identifier struct {
	Token token.Token // the token.IDENT token
	Value string      // the identifier's name
}

// expressionNode marks Identifier as an Expression.
func (i *Identifier) expressionNode() {}

// TokenLiteral returns the literal of the identifier's token.
func (i *Identifier) TokenLiteral() string { return i.Token.Literal }

lexer/lexer.go

+29-29
Original file line numberDiff line numberDiff line change
@@ -2,42 +2,42 @@ package lexer
22

33
import "github.com/uchiiii/gibbon/token"
44

5-
// Lexer turns gibbon source text into a stream of tokens. It scans the
// input one byte at a time, so multi-byte (non-ASCII) characters are
// not supported.
type Lexer struct {
	input        string
	position     int  // index of the byte currently held in ch
	readPosition int  // index of the next byte to read (position + 1)
	ch           byte // byte under examination; 0 signals end of input
}

// New returns a Lexer primed on input: the first byte is already loaded
// into ch, so NextToken can be called immediately.
func New(input string) *Lexer {
	l := &Lexer{input: input}
	l.readChar()
	return l
}

// readChar advances the lexer by one byte, loading the NUL sentinel (0)
// into ch once the end of the input is reached.
func (l *Lexer) readChar() {
	if l.readPosition >= len(l.input) {
		l.ch = 0
	} else {
		l.ch = l.input[l.readPosition]
	}
	l.position = l.readPosition
	l.readPosition++ // idiomatic increment instead of += 1
}
2727

28-
func (l *lexer) NextToken() token.Token {
28+
func (l *Lexer) NextToken() token.Token {
2929
var tok token.Token
3030

3131
l.skipWhitespace()
3232

33-
switch l.ch{
33+
switch l.ch {
3434
case '=':
35-
if l.peekChar() == '='{
35+
if l.peekChar() == '=' {
3636
ch := l.ch
3737
l.readChar()
3838
literal := string(ch) + string(l.ch)
3939
tok = token.Token{Type: token.EQ, Literal: literal}
40-
}else{
40+
} else {
4141
tok = newToken(token.ASSIGN, l.ch)
4242
}
4343
case ';':
@@ -53,12 +53,12 @@ func (l *lexer) NextToken() token.Token {
5353
case '-':
5454
tok = newToken(token.MINUS, l.ch)
5555
case '!':
56-
if l.peekChar() == '='{
56+
if l.peekChar() == '=' {
5757
ch := l.ch
5858
l.readChar()
5959
literal := string(ch) + string(l.ch)
6060
tok = token.Token{Type: token.NOT_EQ, Literal: literal}
61-
}else{
61+
} else {
6262
tok = newToken(token.BANG, l.ch)
6363
}
6464
case '/':
@@ -77,11 +77,11 @@ func (l *lexer) NextToken() token.Token {
7777
tok.Literal = ""
7878
tok.Type = token.EOF
7979
default:
80-
if isLetter(l.ch){
80+
if isLetter(l.ch) {
8181
tok.Literal = l.readIdentifier()
8282
tok.Type = token.LookupIdent(tok.Literal)
8383
return tok
84-
} else if isDigit(l.ch){
84+
} else if isDigit(l.ch) {
8585
tok.Type = token.INT
8686
tok.Literal = l.readNumber()
8787
return tok
@@ -94,44 +94,44 @@ func (l *lexer) NextToken() token.Token {
9494
return tok
9595
}
9696

97-
func newToken(tokenType token.TokenType, ch byte) token.Token{
97+
func newToken(tokenType token.TokenType, ch byte) token.Token {
9898
return token.Token{Type: tokenType, Literal: string(ch)}
9999
}
100100

101-
func (l *lexer) readIdentifier() string{
101+
func (l *Lexer) readIdentifier() string {
102102
position := l.position
103-
for isLetter(l.ch){
103+
for isLetter(l.ch) {
104104
l.readChar()
105105
}
106106
return l.input[position:l.position]
107107
}
108108

109-
// isLetter reports whether ch may appear in an identifier: ASCII
// letters and the underscore.
func isLetter(ch byte) bool {
	switch {
	case ch >= 'a' && ch <= 'z':
		return true
	case ch >= 'A' && ch <= 'Z':
		return true
	default:
		return ch == '_'
	}
}
112112

113-
func (l *lexer) skipWhitespace(){
114-
for l.ch == ' ' || l.ch == '\t' || l.ch == '\n' || l.ch == '\r'{
113+
func (l *Lexer) skipWhitespace() {
114+
for l.ch == ' ' || l.ch == '\t' || l.ch == '\n' || l.ch == '\r' {
115115
l.readChar()
116116
}
117117
}
118118

119-
func (l *lexer) readNumber() string{
119+
func (l *Lexer) readNumber() string {
120120
position := l.position
121-
for isDigit(l.ch){
121+
for isDigit(l.ch) {
122122
l.readChar()
123123
}
124124
return l.input[position:l.position]
125125
}
126126

127-
// isDigit reports whether ch is an ASCII decimal digit.
func isDigit(ch byte) bool {
	return ch >= '0' && ch <= '9'
}
130130

131-
func (l *lexer) peekChar() byte{
132-
if l.readPosition >= len(l.input){
131+
func (l *Lexer) peekChar() byte {
132+
if l.readPosition >= len(l.input) {
133133
return 0
134-
}else{
134+
} else {
135135
return l.input[l.readPosition]
136136
}
137137
}

lexer/lexer_test.go

+6-6
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,11 @@
11
package lexer
22

33
import (
4-
"testing"
54
"github.com/uchiiii/gibbon/token"
5+
"testing"
66
)
77

8-
func TestNextToken(t *testing.T){
8+
func TestNextToken(t *testing.T) {
99
input := `let five = 5;
1010
let ten = 10;
1111
@@ -27,8 +27,8 @@ func TestNextToken(t *testing.T){
2727
10 != 9;
2828
`
2929

30-
tests := []struct{
31-
expectedType token.TokenType
30+
tests := []struct {
31+
expectedType token.TokenType
3232
expectedLiteral string
3333
}{
3434
{token.LET, "let"},
@@ -109,9 +109,9 @@ func TestNextToken(t *testing.T){
109109

110110
l := New(input)
111111

112-
for i,tt := range tests {
112+
for i, tt := range tests {
113113
tok := l.NextToken()
114-
114+
115115
if tok.Type != tt.expectedType {
116116
t.Fatalf("tests[%d] - tokentype wrong. expected=%q, got=%q", i, tt.expectedType, tok.Type)
117117
}

main.go

+3-3
Original file line numberDiff line numberDiff line change
@@ -2,14 +2,14 @@ package main
22

33
import (
44
"fmt"
5+
"github.com/uchiiii/gibbon/repl"
56
"os"
67
"os/user"
7-
"github.com/uchiiii/gibbon/repl"
88
)
99

10-
func main(){
10+
func main() {
1111
user, err := user.Current()
12-
if err != nil{
12+
if err != nil {
1313
panic(err)
1414
}
1515
fmt.Printf("Hello %s! This is gibbon Programming language!\n", user.Username)

parser/parser.go

+68-8
Original file line numberDiff line numberDiff line change
@@ -1,31 +1,91 @@
11
package parser
22

33
import (
4-
"github.com/uchiiii/gibbon/token"
54
"github.com/uchiiii/gibbon/ast"
6-
"github.com/uchiiii/gibbin/lexer"
5+
"github.com/uchiiii/gibbon/lexer"
6+
"github.com/uchiiii/gibbon/token"
77
)
88

9-
// Parser builds an AST from the token stream produced by a Lexer. It
// keeps a two-token window over the input: curToken is the token under
// examination and peekToken the one after it.
type Parser struct {
	l *lexer.Lexer

	curToken  token.Token
	peekToken token.Token
}
1515

16-
// New returns a Parser reading from l. nextToken is called twice so
// that both curToken and peekToken are populated before parsing starts.
func New(l *lexer.Lexer) *Parser {
	p := &Parser{l: l}
	p.nextToken()
	p.nextToken()

	return p
}
2323

24-
func (p *Parser) nextToken(){
24+
func (p *Parser) nextToken() {
2525
p.curToken = p.peekToken
2626
p.peekToken = p.l.NextToken()
2727
}
2828

29-
func (p *Parser) ParseProgram() *ast.Program{
30-
return nil
29+
func (p *Parser) ParseProgram() *ast.Program {
30+
program := &ast.Program{}
31+
program.Statements = []ast.Statement{}
32+
33+
for p.curToken.Type != token.EOF {
34+
stmt := p.parseStatement()
35+
if stmt != nil {
36+
program.Statements = append(program.Statements, stmt)
37+
}
38+
p.nextToken()
39+
}
40+
41+
return program
42+
}
43+
44+
func (p *Parser) parseStatement() ast.Statement {
45+
switch p.curToken.Type {
46+
case token.LET:
47+
return p.parseLetStatement()
48+
default:
49+
return nil
50+
}
51+
}
52+
53+
func (p *Parser) parseLetStatement() *ast.LetStatement {
54+
stmt := &ast.LetStatement{Token: p.curToken}
55+
56+
if !p.expectPeek(token.IDENT) {
57+
return nil
58+
}
59+
60+
stmt.Name = &ast.Identifier{
61+
Token: p.curToken,
62+
Value: p.curToken.Literal,
63+
}
64+
65+
if !p.expectPeek(token.ASSIGN) {
66+
return nil
67+
}
68+
69+
for !p.curTokenIs(token.SEMICOLON) {
70+
p.nextToken()
71+
}
72+
73+
return stmt
74+
}
75+
76+
// curTokenIs reports whether the current token has type t.
func (p *Parser) curTokenIs(t token.TokenType) bool {
	return p.curToken.Type == t
}

// peekTokenIs reports whether the upcoming token has type t.
func (p *Parser) peekTokenIs(t token.TokenType) bool {
	return p.peekToken.Type == t
}
83+
84+
func (p *Parser) expectPeek(t token.TokenType) bool {
85+
if p.peekTokenIs(t) {
86+
p.nextToken()
87+
return true
88+
} else {
89+
return false
90+
}
3191
}

0 commit comments

Comments
 (0)