From a6b94b0994d1b00bcd3d672d68a7e3420317d8ca Mon Sep 17 00:00:00 2001 From: brettlangdon Date: Tue, 22 Sep 2015 13:36:05 -0400 Subject: [PATCH] reorganize some things again --- {parser => error}/error.go | 2 +- grammar/expressions.go | 128 ++++++++++---------- grammar/nodes.go | 82 +++++++++++++ parser/grammar.go => grammar/parser.go | 156 ++++++++++++------------- grammar/rules.go | 82 ------------- grammar/start.go | 10 +- grammar/statements.go | 40 +++---- grammar/tests.go | 32 ++--- main.go | 9 +- 9 files changed, 270 insertions(+), 271 deletions(-) rename {parser => error}/error.go (85%) create mode 100644 grammar/nodes.go rename parser/grammar.go => grammar/parser.go (73%) delete mode 100644 grammar/rules.go diff --git a/parser/error.go b/error/error.go similarity index 85% rename from parser/error.go rename to error/error.go index 6c00a33..ff054e5 100644 --- a/parser/error.go +++ b/error/error.go @@ -1,4 +1,4 @@ -package parser +package error type Error struct { Message string diff --git a/grammar/expressions.go b/grammar/expressions.go index 3da9946..7ca88f0 100644 --- a/grammar/expressions.go +++ b/grammar/expressions.go @@ -3,251 +3,251 @@ package grammar import "github.com/brettlangdon/gython/symbol" type TestlistStarExpressionChild interface { - Rule + Node testlistStarExpressionChild() } type TestlistStarExpression struct { - ParentRule + ParentNode } func NewTestListStarExpression() *TestlistStarExpression { rule := &TestlistStarExpression{} - rule.initBaseRule(symbol.TESTLIST_STAR_EXPR) + rule.initBaseNode(symbol.TESTLIST_STAR_EXPR) return rule } func (rule *TestlistStarExpression) expressionStatementChild() {} func (rule *TestlistStarExpression) SetChild(n TestlistStarExpressionChild) { - rule.ParentRule.SetChild(n) + rule.ParentNode.SetChild(n) } type ComparisonChild interface { - Rule + Node comparisonChild() } type Comparison struct { - ListRule + ListNode } func NewComparison() *Comparison { rule := &Comparison{} - rule.initBaseRule(symbol.COMPARISON) - rule.initListRule() + rule.initBaseNode(symbol.COMPARISON) + rule.initListNode() return rule } func (rule *Comparison) notTestChild() {} -func (rule *Comparison) Append(n ComparisonChild) { rule.ListRule.Append(n) } +func (rule *Comparison) Append(n ComparisonChild) { rule.ListNode.Append(n) } type ExpressionChild interface { - Rule + Node expressionChild() } type Expression struct { - ListRule + ListNode } func NewExpression() *Expression { rule := &Expression{} - rule.initBaseRule(symbol.EXPR) - rule.initListRule() + rule.initBaseNode(symbol.EXPR) + rule.initListNode() return rule } func (rule *Expression) comparisonChild() {} -func (rule *Expression) Append(n ExpressionChild) { rule.ListRule.Append(n) } +func (rule *Expression) Append(n ExpressionChild) { rule.ListNode.Append(n) } type XorExpressionChild interface { - Rule + Node xorExpressionChild() } type XorExpression struct { - ListRule + ListNode } func NewXorExpression() *XorExpression { rule := &XorExpression{} - rule.initBaseRule(symbol.XOR_EXPR) - rule.initListRule() + rule.initBaseNode(symbol.XOR_EXPR) + rule.initListNode() return rule } func (rule *XorExpression) expressionChild() {} -func (rule *XorExpression) Append(n XorExpressionChild) { rule.ListRule.Append(n) } +func (rule *XorExpression) Append(n XorExpressionChild) { rule.ListNode.Append(n) } type AndExpressionChild interface { - Rule + Node andExpressionChild() } type AndExpression struct { - ListRule + ListNode } func NewAndExpression() *AndExpression { rule := &AndExpression{} - 
rule.initBaseRule(symbol.AND_EXPR) - rule.initListRule() + rule.initBaseNode(symbol.AND_EXPR) + rule.initListNode() return rule } func (rule *AndExpression) xorExpressionChild() {} -func (rule *AndExpression) Append(n AndExpressionChild) { rule.ListRule.Append(n) } +func (rule *AndExpression) Append(n AndExpressionChild) { rule.ListNode.Append(n) } type ShiftExpressionChild interface { - Rule + Node shiftExpressionChild() } type ShiftExpression struct { - ListRule + ListNode } func NewShiftExpression() *ShiftExpression { rule := &ShiftExpression{} - rule.initBaseRule(symbol.SHIFT_EXPR) - rule.initListRule() + rule.initBaseNode(symbol.SHIFT_EXPR) + rule.initListNode() return rule } func (rule *ShiftExpression) andExpressionChild() {} -func (rule *ShiftExpression) Append(n ShiftExpressionChild) { rule.ListRule.Append(n) } +func (rule *ShiftExpression) Append(n ShiftExpressionChild) { rule.ListNode.Append(n) } type ArithmeticExpressionChild interface { - Rule + Node arithmeticExpressionChild() } type ArithmeticExpression struct { - ListRule + ListNode } func NewArithmeticExpression() *ArithmeticExpression { rule := &ArithmeticExpression{} - rule.initBaseRule(symbol.ARITH_EXPR) - rule.initListRule() + rule.initBaseNode(symbol.ARITH_EXPR) + rule.initListNode() return rule } func (rule *ArithmeticExpression) shiftExpressionChild() {} -func (rule *ArithmeticExpression) Append(n ArithmeticExpressionChild) { rule.ListRule.Append(n) } +func (rule *ArithmeticExpression) Append(n ArithmeticExpressionChild) { rule.ListNode.Append(n) } type TermChild interface { - Rule + Node termChild() } type Term struct { - ListRule + ListNode } func NewTerm() *Term { rule := &Term{} - rule.initBaseRule(symbol.TERM) - rule.initListRule() + rule.initBaseNode(symbol.TERM) + rule.initListNode() return rule } func (rule *Term) arithmeticExpressionChild() {} -func (rule *Term) Append(n TermChild) { rule.ListRule.Append(n) } +func (rule *Term) Append(n TermChild) { rule.ListNode.Append(n) } type FactorChild interface { - Rule + Node factorChild() } type Factor struct { - ListRule + ListNode } func NewFactor() *Factor { rule := &Factor{} - rule.initBaseRule(symbol.FACTOR) - rule.initListRule() + rule.initBaseNode(symbol.FACTOR) + rule.initListNode() return rule } func (rule *Factor) factorChild() {} func (rule *Factor) powerChild() {} func (rule *Factor) termChild() {} -func (rule *Factor) Append(n FactorChild) { rule.ListRule.Append(n) } +func (rule *Factor) Append(n FactorChild) { rule.ListNode.Append(n) } type PowerChild interface { - Rule + Node powerChild() } type Power struct { - ListRule + ListNode } func NewPower() *Power { rule := &Power{} - rule.initBaseRule(symbol.POWER) - rule.initListRule() + rule.initBaseNode(symbol.POWER) + rule.initListNode() return rule } func (rule *Power) factorChild() {} -func (rule *Power) Append(n PowerChild) { rule.ListRule.Append(n) } +func (rule *Power) Append(n PowerChild) { rule.ListNode.Append(n) } type AtomExpressionChild interface { - Rule + Node atomExpressionChild() } type AtomExpression struct { - ListRule + ListNode } func NewAtomExpression() *AtomExpression { rule := &AtomExpression{} - rule.initBaseRule(symbol.ATOM_EXPR) - rule.initListRule() + rule.initBaseNode(symbol.ATOM_EXPR) + rule.initListNode() return rule } func (rule *AtomExpression) powerChild() {} -func (rule *AtomExpression) Append(n AtomExpressionChild) { rule.ListRule.Append(n) } +func (rule *AtomExpression) Append(n AtomExpressionChild) { rule.ListNode.Append(n) } type AtomChild interface { - Rule + Node 
atomChild() } type Atom struct { - ListRule + ListNode } func NewAtom() *Atom { rule := &Atom{} - rule.initBaseRule(symbol.ATOM) - rule.initListRule() + rule.initBaseNode(symbol.ATOM) + rule.initListNode() return rule } func (rule *Atom) atomExpressionChild() {} -func (rule *Atom) Append(n AtomChild) { rule.ListRule.Append(n) } +func (rule *Atom) Append(n AtomChild) { rule.ListNode.Append(n) } type TrailerChild interface { - Rule + Node trailerChild() } type Trailer struct { - ListRule + ListNode } func NewTrailer() *Trailer { rule := &Trailer{} - rule.initBaseRule(symbol.TRAILER) - rule.initListRule() + rule.initBaseNode(symbol.TRAILER) + rule.initListNode() return rule } func (rule *Trailer) atomExpressionChild() {} -func (rule *Trailer) Append(n TrailerChild) { rule.ListRule.Append(n) } +func (rule *Trailer) Append(n TrailerChild) { rule.ListNode.Append(n) } diff --git a/grammar/nodes.go b/grammar/nodes.go new file mode 100644 index 0000000..4b91e28 --- /dev/null +++ b/grammar/nodes.go @@ -0,0 +1,82 @@ +package grammar + +import ( + "fmt" + + "github.com/brettlangdon/gython/symbol" + "github.com/brettlangdon/gython/token" +) + +type Node interface { + Name() string + Repr() []interface{} +} + +type TokenNode struct { + Token *token.Token +} + +func NewTokenNode(tok *token.Token) *TokenNode { + return &TokenNode{ + Token: tok, + } +} +func (rule *TokenNode) atomChild() {} +func (rule *TokenNode) atomExpressionChild() {} +func (rule *TokenNode) comparisonChild() {} +func (rule *TokenNode) expressionStatementChild() {} +func (rule *TokenNode) factorChild() {} +func (rule *TokenNode) fileInputChild() {} +func (rule *TokenNode) shiftExpressionChild() {} +func (rule *TokenNode) simpleStatementChild() {} +func (rule *TokenNode) trailerChild() {} +func (rule *TokenNode) Name() string { return token.TokenNames[rule.Token.ID] } +func (rule *TokenNode) Repr() []interface{} { + parts := make([]interface{}, 0) + parts = append(parts, rule.Name()) + literal := fmt.Sprintf("%#v", rule.Token.Literal) + return append(parts, literal) +} + +type BaseNode struct { + ID symbol.SymbolID + child Node +} + +func (rule *BaseNode) initBaseNode(id symbol.SymbolID) { rule.ID = id } +func (rule *BaseNode) Name() string { return symbol.SymbolNames[rule.ID] } +func (rule *BaseNode) Repr() (parts []interface{}) { return append(parts, rule.Name()) } + +type ParentNode struct { + BaseNode + child Node +} + +func (rule *ParentNode) SetChild(n Node) { rule.child = n } +func (rule *ParentNode) Child() Node { return rule.child } +func (rule *ParentNode) Repr() (parts []interface{}) { + parts = rule.BaseNode.Repr() + child := rule.Child() + if child != nil { + parts = append(parts, child.Repr()) + } + return parts +} + +type ListNode struct { + BaseNode + children []Node +} + +func (rule *ListNode) initListNode() { rule.children = make([]Node, 0) } +func (rule *ListNode) Length() int { return len(rule.children) } +func (rule *ListNode) Children() []Node { return rule.children } +func (rule *ListNode) Append(n Node) { rule.children = append(rule.children, n) } +func (rule *ListNode) Repr() (parts []interface{}) { + parts = rule.BaseNode.Repr() + children := rule.Children() + for _, child := range children { + parts = append(parts, child.Repr()) + } + return parts +} diff --git a/parser/grammar.go b/grammar/parser.go similarity index 73% rename from parser/grammar.go rename to grammar/parser.go index 9bbdf52..d1a0316 100644 --- a/parser/grammar.go +++ b/grammar/parser.go @@ -1,16 +1,14 @@ -package parser +package grammar 
import ( - "io" - + "github.com/brettlangdon/gython/error" "github.com/brettlangdon/gython/errorcode" - "github.com/brettlangdon/gython/grammar" "github.com/brettlangdon/gython/scanner" "github.com/brettlangdon/gython/token" ) type GrammarParser struct { - Errors []*Error + Errors []*error.Error tokenizer *scanner.Scanner tokenBuffer []*token.Token } @@ -31,7 +29,7 @@ func (parser *GrammarParser) unreadToken(tok *token.Token) { } func (parser *GrammarParser) addError(msg string) { - parser.Errors = append(parser.Errors, &Error{ + parser.Errors = append(parser.Errors, &error.Error{ Message: msg, }) } @@ -57,8 +55,8 @@ func (parser *GrammarParser) expectLiteral(literal string) bool { } // compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt -func (parser *GrammarParser) parseCompoundStatement() *grammar.CompoundStatement { - compoundStmt := grammar.NewCompoundStatement() +func (parser *GrammarParser) parseCompoundStatement() *CompoundStatement { + compoundStmt := NewCompoundStatement() return compoundStmt } @@ -66,29 +64,29 @@ func (parser *GrammarParser) parseCompoundStatement() *grammar.CompoundStatement // '[' [testlist_comp] ']' | // '{' [dictorsetmaker] '}' | // NAME | NUMBER | STRING+ | '...' | 'None' | 'True' | 'False') -func (parser *GrammarParser) parseAtom() *grammar.Atom { - atom := grammar.NewAtom() +func (parser *GrammarParser) parseAtom() *Atom { + atom := NewAtom() next := parser.nextToken() switch next.ID { case token.NAME, token.NUMBER, token.ELLIPSIS: - atom.Append(grammar.NewTokenRule(next)) + atom.Append(NewTokenNode(next)) case token.STRING: - atom.Append(grammar.NewTokenRule(next)) + atom.Append(NewTokenNode(next)) for { next := parser.nextToken() if next.ID != token.STRING { parser.unreadToken(next) break } - atom.Append(grammar.NewTokenRule(next)) + atom.Append(NewTokenNode(next)) } } return atom } // trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' 
NAME -func (parser *GrammarParser) parseTrailer() *grammar.Trailer { - trailer := grammar.NewTrailer() +func (parser *GrammarParser) parseTrailer() *Trailer { + trailer := NewTrailer() next := parser.nextToken() switch next.ID { case token.LPAR: @@ -96,20 +94,20 @@ func (parser *GrammarParser) parseTrailer() *grammar.Trailer { if next2.ID != token.RPAR { return nil } - trailer.Append(grammar.NewTokenRule(next)) - trailer.Append(grammar.NewTokenRule(next2)) + trailer.Append(NewTokenNode(next)) + trailer.Append(NewTokenNode(next2)) case token.LBRACE: next2 := parser.nextToken() if next2.ID != token.RBRACE { return nil } - trailer.Append(grammar.NewTokenRule(next)) - trailer.Append(grammar.NewTokenRule(next2)) + trailer.Append(NewTokenNode(next)) + trailer.Append(NewTokenNode(next2)) case token.DOT: next2 := parser.nextToken() if next2.ID == token.NAME { - trailer.Append(grammar.NewTokenRule(next)) - trailer.Append(grammar.NewTokenRule(next2)) + trailer.Append(NewTokenNode(next)) + trailer.Append(NewTokenNode(next2)) } else { parser.addError("Expected \"NAME\" instead found \"" + next.ID.String() + "\"") return nil @@ -122,11 +120,11 @@ func (parser *GrammarParser) parseTrailer() *grammar.Trailer { } // atom_expr: [AWAIT] atom trailer* -func (parser *GrammarParser) parseAtomExpression() *grammar.AtomExpression { - expr := grammar.NewAtomExpression() +func (parser *GrammarParser) parseAtomExpression() *AtomExpression { + expr := NewAtomExpression() next := parser.nextToken() if next.ID == token.AWAIT { - expr.Append(grammar.NewTokenRule(next)) + expr.Append(NewTokenNode(next)) } else { parser.unreadToken(next) } @@ -148,8 +146,8 @@ func (parser *GrammarParser) parseAtomExpression() *grammar.AtomExpression { } // power: atom_expr ['**' factor] -func (parser *GrammarParser) parsePower() *grammar.Power { - power := grammar.NewPower() +func (parser *GrammarParser) parsePower() *Power { + power := NewPower() atomExpr := parser.parseAtomExpression() if atomExpr == nil { return nil @@ -171,8 +169,8 @@ func (parser *GrammarParser) parsePower() *grammar.Power { } // factor: ('+'|'-'|'~') factor | power -func (parser *GrammarParser) parseFactor() *grammar.Factor { - factor := grammar.NewFactor() +func (parser *GrammarParser) parseFactor() *Factor { + factor := NewFactor() next := parser.nextToken() switch next.ID { case token.PLUS, token.MINUS, token.TILDE: @@ -180,7 +178,7 @@ func (parser *GrammarParser) parseFactor() *grammar.Factor { if node == nil { return nil } - factor.Append(grammar.NewTokenRule(next)) + factor.Append(NewTokenNode(next)) factor.Append(node) default: parser.unreadToken(next) @@ -195,8 +193,8 @@ func (parser *GrammarParser) parseFactor() *grammar.Factor { } // term: factor (('*'|'@'|'/'|'%'|'//') factor)* -func (parser *GrammarParser) parseTerm() *grammar.Term { - term := grammar.NewTerm() +func (parser *GrammarParser) parseTerm() *Term { + term := NewTerm() factor := parser.parseFactor() if factor == nil { return nil @@ -218,8 +216,8 @@ func (parser *GrammarParser) parseTerm() *grammar.Term { } // arith_expr: term (('+'|'-') term)* -func (parser *GrammarParser) parseArithmetricExpression() *grammar.ArithmeticExpression { - expr := grammar.NewArithmeticExpression() +func (parser *GrammarParser) parseArithmetricExpression() *ArithmeticExpression { + expr := NewArithmeticExpression() term := parser.parseTerm() if term == nil { return nil @@ -241,8 +239,8 @@ func (parser *GrammarParser) parseArithmetricExpression() *grammar.ArithmeticExp } // shift_expr: arith_expr (('<<'|'>>') 
arith_expr)* -func (parser *GrammarParser) parseShiftExpression() *grammar.ShiftExpression { - expr := grammar.NewShiftExpression() +func (parser *GrammarParser) parseShiftExpression() *ShiftExpression { + expr := NewShiftExpression() arithExpr := parser.parseArithmetricExpression() if arithExpr == nil { return nil @@ -254,7 +252,7 @@ func (parser *GrammarParser) parseShiftExpression() *grammar.ShiftExpression { parser.unreadToken(next) break } - expr.Append(grammar.NewTokenRule(next)) + expr.Append(NewTokenNode(next)) arithExpr := parser.parseArithmetricExpression() if arithExpr == nil { return nil @@ -265,8 +263,8 @@ func (parser *GrammarParser) parseShiftExpression() *grammar.ShiftExpression { } // and_expr: shift_expr ('&' shift_expr)* -func (parser *GrammarParser) parseAndExpression() *grammar.AndExpression { - expr := grammar.NewAndExpression() +func (parser *GrammarParser) parseAndExpression() *AndExpression { + expr := NewAndExpression() shiftExpr := parser.parseShiftExpression() if shiftExpr == nil { return nil @@ -288,8 +286,8 @@ func (parser *GrammarParser) parseAndExpression() *grammar.AndExpression { } // xor_expr: and_expr ('^' and_expr)* -func (parser *GrammarParser) parseXorExpression() *grammar.XorExpression { - expr := grammar.NewXorExpression() +func (parser *GrammarParser) parseXorExpression() *XorExpression { + expr := NewXorExpression() andExpr := parser.parseAndExpression() if andExpr == nil { return nil @@ -311,8 +309,8 @@ func (parser *GrammarParser) parseXorExpression() *grammar.XorExpression { } // expr: xor_expr ('|' xor_expr)* -func (parser *GrammarParser) parseExpression() *grammar.Expression { - expr := grammar.NewExpression() +func (parser *GrammarParser) parseExpression() *Expression { + expr := NewExpression() xorExpr := parser.parseXorExpression() if xorExpr == nil { return nil @@ -334,8 +332,8 @@ func (parser *GrammarParser) parseExpression() *grammar.Expression { } // comparison: expr (comp_op expr)* -func (parser *GrammarParser) parseComparison() *grammar.Comparison { - comparison := grammar.NewComparison() +func (parser *GrammarParser) parseComparison() *Comparison { + comparison := NewComparison() expr := parser.parseExpression() if expr == nil { return nil @@ -348,20 +346,20 @@ func (parser *GrammarParser) parseComparison() *grammar.Comparison { next := parser.nextToken() switch next.Literal { case "<", ">", "==", ">=", "<=", "<>", "!=", "in": - comparison.Append(grammar.NewTokenRule(next)) + comparison.Append(NewTokenNode(next)) case "is": - comparison.Append(grammar.NewTokenRule(next)) + comparison.Append(NewTokenNode(next)) next2 := parser.nextToken() if next2.Literal == "not" { - comparison.Append(grammar.NewTokenRule(next2)) + comparison.Append(NewTokenNode(next2)) } else { parser.unreadToken(next2) } case "not": next2 := parser.nextToken() if next2.Literal == "in" { - comparison.Append(grammar.NewTokenRule(next)) - comparison.Append(grammar.NewTokenRule(next2)) + comparison.Append(NewTokenNode(next)) + comparison.Append(NewTokenNode(next2)) } else { parser.unreadToken(next2) parser.unreadToken(next) @@ -385,8 +383,8 @@ func (parser *GrammarParser) parseComparison() *grammar.Comparison { } // not_test: 'not' not_test | comparison -func (parser *GrammarParser) parseNotTest() *grammar.NotTest { - notTest := grammar.NewNotTest() +func (parser *GrammarParser) parseNotTest() *NotTest { + notTest := NewNotTest() next := parser.nextToken() if next.IsLiteral("not") { test := parser.parseNotTest() @@ -406,8 +404,8 @@ func (parser *GrammarParser) 
parseNotTest() *grammar.NotTest { } // and_test: not_test ('and' not_test)* -func (parser *GrammarParser) parseAndTest() *grammar.AndTest { - andTest := grammar.NewAndTest() +func (parser *GrammarParser) parseAndTest() *AndTest { + andTest := NewAndTest() notTest := parser.parseNotTest() if notTest == nil { return nil @@ -430,8 +428,8 @@ func (parser *GrammarParser) parseAndTest() *grammar.AndTest { } // or_test: and_test ('or' and_test)* -func (parser *GrammarParser) parseOrTest() *grammar.OrTest { - orTest := grammar.NewOrTest() +func (parser *GrammarParser) parseOrTest() *OrTest { + orTest := NewOrTest() andTest := parser.parseAndTest() if andTest == nil { return nil @@ -453,8 +451,8 @@ func (parser *GrammarParser) parseOrTest() *grammar.OrTest { } // test: or_test ['if' or_test 'else' test] | lambdef -func (parser *GrammarParser) parseTest() *grammar.Test { - test := grammar.NewTest() +func (parser *GrammarParser) parseTest() *Test { + test := NewTest() orTest := parser.parseOrTest() if orTest != nil { @@ -483,10 +481,10 @@ func (parser *GrammarParser) parseTest() *grammar.Test { } // testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [','] -func (parser *GrammarParser) parseTestlistStarExpression() *grammar.TestlistStarExpression { - testlistStarExpression := grammar.NewTestListStarExpression() +func (parser *GrammarParser) parseTestlistStarExpression() *TestlistStarExpression { + testlistStarExpression := NewTestListStarExpression() - var expr grammar.TestlistStarExpressionChild + var expr TestlistStarExpressionChild expr = parser.parseTest() if expr == nil { return nil @@ -497,8 +495,8 @@ func (parser *GrammarParser) parseTestlistStarExpression() *grammar.TestlistStar // expr_stmt: testlist_star_expr (augassign (yield_expr|testlist) | // ('=' (yield_expr|testlist_star_expr))*) -func (parser *GrammarParser) parseExpressionStatement() *grammar.ExpressionStatement { - exprStmt := grammar.NewExpressionStatement() +func (parser *GrammarParser) parseExpressionStatement() *ExpressionStatement { + exprStmt := NewExpressionStatement() expr := parser.parseTestlistStarExpression() if expr == nil { return nil @@ -513,7 +511,7 @@ func (parser *GrammarParser) parseExpressionStatement() *grammar.ExpressionState parser.unreadToken(next) break } - exprStmt.Append(grammar.NewTokenRule(next)) + exprStmt.Append(NewTokenNode(next)) expr := parser.parseTestlistStarExpression() if expr == nil { return nil @@ -527,10 +525,10 @@ func (parser *GrammarParser) parseExpressionStatement() *grammar.ExpressionState // small_stmt: (expr_stmt | del_stmt | pass_stmt | flow_stmt | // import_stmt | global_stmt | nonlocal_stmt | assert_stmt) -func (parser *GrammarParser) parseSmallStatment() *grammar.SmallStatement { - smallStmt := grammar.NewSmallStatement() +func (parser *GrammarParser) parseSmallStatment() *SmallStatement { + smallStmt := NewSmallStatement() - var stmt grammar.SmallStatementChild + var stmt SmallStatementChild stmt = parser.parseExpressionStatement() if stmt != nil { smallStmt.SetChild(stmt) @@ -543,8 +541,8 @@ func (parser *GrammarParser) parseSmallStatment() *grammar.SmallStatement { } // simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE -func (parser *GrammarParser) parseSimpleStatement() *grammar.SimpleStatement { - simpleStmt := grammar.NewSimpleStatement() +func (parser *GrammarParser) parseSimpleStatement() *SimpleStatement { + simpleStmt := NewSimpleStatement() for { smallStmt := parser.parseSmallStatment() if smallStmt == nil { @@ -562,7 +560,7 @@ func (parser *GrammarParser) 
parseSimpleStatement() *grammar.SimpleStatement { parser.addError("Expected \"NEWLINE\" instead found \"" + next.ID.String() + "\"") return nil } - simpleStmt.Append(grammar.NewTokenRule(next)) + simpleStmt.Append(NewTokenNode(next)) // no small statements found if simpleStmt.Length() == 0 { @@ -572,8 +570,8 @@ func (parser *GrammarParser) parseSimpleStatement() *grammar.SimpleStatement { } // stmt: simple_stmt | compound_stmt -func (parser *GrammarParser) parseStatement() *grammar.Statement { - var next grammar.StatementChild +func (parser *GrammarParser) parseStatement() *Statement { + var next StatementChild next = parser.parseSimpleStatement() if next == nil { next = parser.parseCompoundStatement() @@ -583,18 +581,18 @@ func (parser *GrammarParser) parseStatement() *grammar.Statement { return nil } - stmt := grammar.NewStatement() + stmt := NewStatement() stmt.SetChild(next) return stmt } // file_input: (NEWLINE | stmt)* ENDMARKER -func (parser *GrammarParser) parseFileInput() *grammar.FileInput { - root := grammar.NewFileInput() +func (parser *GrammarParser) parseFileInput() *FileInput { + root := NewFileInput() for parser.tokenizer.State() == errorcode.E_OK { next := parser.nextToken() if next.ID == token.NEWLINE { - root.Append(grammar.NewTokenRule(next)) + root.Append(NewTokenNode(next)) } else if next.ID == token.ENDMARKER { // Unread, so we can read in the expected value later parser.unreadToken(next) @@ -614,18 +612,18 @@ func (parser *GrammarParser) parseFileInput() *grammar.FileInput { parser.addError("Expected \"ENDMARKER\" instead received \"" + next.ID.String() + "\"") return nil } - root.Append(grammar.NewTokenRule(next)) + root.Append(NewTokenNode(next)) return root } -func NewGrammarParser(r io.Reader) *GrammarParser { +func NewGrammarParser(s *scanner.Scanner) *GrammarParser { return &GrammarParser{ - tokenizer: scanner.NewScanner(r), + tokenizer: s, tokenBuffer: make([]*token.Token, 0), } } -func (parser *GrammarParser) Parse() *grammar.FileInput { +func (parser *GrammarParser) Parse() *FileInput { return parser.parseFileInput() } diff --git a/grammar/rules.go b/grammar/rules.go deleted file mode 100644 index 8e4c05c..0000000 --- a/grammar/rules.go +++ /dev/null @@ -1,82 +0,0 @@ -package grammar - -import ( - "fmt" - - "github.com/brettlangdon/gython/symbol" - "github.com/brettlangdon/gython/token" -) - -type Rule interface { - Name() string - Repr() []interface{} -} - -type TokenRule struct { - Token *token.Token -} - -func NewTokenRule(tok *token.Token) *TokenRule { - return &TokenRule{ - Token: tok, - } -} -func (rule *TokenRule) atomChild() {} -func (rule *TokenRule) atomExpressionChild() {} -func (rule *TokenRule) comparisonChild() {} -func (rule *TokenRule) expressionStatementChild() {} -func (rule *TokenRule) factorChild() {} -func (rule *TokenRule) fileInputChild() {} -func (rule *TokenRule) shiftExpressionChild() {} -func (rule *TokenRule) simpleStatementChild() {} -func (rule *TokenRule) trailerChild() {} -func (rule *TokenRule) Name() string { return token.TokenNames[rule.Token.ID] } -func (rule *TokenRule) Repr() []interface{} { - parts := make([]interface{}, 0) - parts = append(parts, rule.Name()) - literal := fmt.Sprintf("%#v", rule.Token.Literal) - return append(parts, literal) -} - -type BaseRule struct { - ID symbol.SymbolID - child Rule -} - -func (rule *BaseRule) initBaseRule(id symbol.SymbolID) { rule.ID = id } -func (rule *BaseRule) Name() string { return symbol.SymbolNames[rule.ID] } -func (rule *BaseRule) Repr() (parts []interface{}) { return 
append(parts, rule.Name()) } - -type ParentRule struct { - BaseRule - child Rule -} - -func (rule *ParentRule) SetChild(n Rule) { rule.child = n } -func (rule *ParentRule) Child() Rule { return rule.child } -func (rule *ParentRule) Repr() (parts []interface{}) { - parts = rule.BaseRule.Repr() - child := rule.Child() - if child != nil { - parts = append(parts, child.Repr()) - } - return parts -} - -type ListRule struct { - BaseRule - children []Rule -} - -func (rule *ListRule) initListRule() { rule.children = make([]Rule, 0) } -func (rule *ListRule) Length() int { return len(rule.children) } -func (rule *ListRule) Children() []Rule { return rule.children } -func (rule *ListRule) Append(n Rule) { rule.children = append(rule.children, n) } -func (rule *ListRule) Repr() (parts []interface{}) { - parts = rule.BaseRule.Repr() - children := rule.Children() - for _, child := range children { - parts = append(parts, child.Repr()) - } - return parts -} diff --git a/grammar/start.go b/grammar/start.go index f6f1d1e..b553e5b 100644 --- a/grammar/start.go +++ b/grammar/start.go @@ -3,19 +3,19 @@ package grammar import "github.com/brettlangdon/gython/symbol" type FileInputChild interface { - Rule + Node fileInputChild() } type FileInput struct { - ListRule + ListNode } func NewFileInput() *FileInput { rule := &FileInput{} - rule.initBaseRule(symbol.FILE_INPUT) - rule.initListRule() + rule.initBaseNode(symbol.FILE_INPUT) + rule.initListNode() return rule } -func (rule *FileInput) Append(n FileInputChild) { rule.ListRule.Append(n) } +func (rule *FileInput) Append(n FileInputChild) { rule.ListNode.Append(n) } diff --git a/grammar/statements.go b/grammar/statements.go index 0398ea0..bfc7aff 100644 --- a/grammar/statements.go +++ b/grammar/statements.go @@ -3,87 +3,87 @@ package grammar import "github.com/brettlangdon/gython/symbol" type StatementChild interface { - Rule + Node stmtChild() } type Statement struct { - ParentRule + ParentNode } func NewStatement() *Statement { rule := &Statement{} - rule.initBaseRule(symbol.STMT) + rule.initBaseNode(symbol.STMT) return rule } func (rule *Statement) fileInputChild() {} -func (rule *Statement) SetChild(n StatementChild) { rule.ParentRule.SetChild(n) } +func (rule *Statement) SetChild(n StatementChild) { rule.ParentNode.SetChild(n) } type SimpleStatementChild interface { - Rule + Node simpleStatementChild() } type SimpleStatement struct { - ListRule + ListNode } func NewSimpleStatement() *SimpleStatement { rule := &SimpleStatement{} - rule.initBaseRule(symbol.SIMPLE_STMT) - rule.initListRule() + rule.initBaseNode(symbol.SIMPLE_STMT) + rule.initListNode() return rule } func (rule *SimpleStatement) stmtChild() {} -func (rule *SimpleStatement) Append(n SimpleStatementChild) { rule.ListRule.Append(n) } +func (rule *SimpleStatement) Append(n SimpleStatementChild) { rule.ListNode.Append(n) } type CompoundStatement struct { - BaseRule + BaseNode } func NewCompoundStatement() *CompoundStatement { rule := &CompoundStatement{} - rule.initBaseRule(symbol.COMPOUND_STMT) + rule.initBaseNode(symbol.COMPOUND_STMT) return rule } func (rule *CompoundStatement) stmtChild() {} type SmallStatementChild interface { - Rule + Node smallStmtChild() } type SmallStatement struct { - ParentRule + ParentNode } func NewSmallStatement() *SmallStatement { rule := &SmallStatement{} - rule.initBaseRule(symbol.SMALL_STMT) + rule.initBaseNode(symbol.SMALL_STMT) return rule } func (rule *SmallStatement) simpleStatementChild() {} -func (rule *SmallStatement) SetChild(n SmallStatementChild) { 
rule.ParentRule.SetChild(n) } +func (rule *SmallStatement) SetChild(n SmallStatementChild) { rule.ParentNode.SetChild(n) } type ExpressionStatementChild interface { - Rule + Node expressionStatementChild() } type ExpressionStatement struct { - ListRule + ListNode Expression *TestlistStarExpression } func NewExpressionStatement() *ExpressionStatement { rule := &ExpressionStatement{} - rule.initBaseRule(symbol.EXPR_STMT) - rule.initListRule() + rule.initBaseNode(symbol.EXPR_STMT) + rule.initListNode() return rule } func (rule *ExpressionStatement) smallStmtChild() {} -func (rule *ExpressionStatement) Append(n ExpressionStatementChild) { rule.ListRule.Append(n) } +func (rule *ExpressionStatement) Append(n ExpressionStatementChild) { rule.ListNode.Append(n) } diff --git a/grammar/tests.go b/grammar/tests.go index 0f51bd7..3cc83f8 100644 --- a/grammar/tests.go +++ b/grammar/tests.go @@ -3,75 +3,75 @@ package grammar import "github.com/brettlangdon/gython/symbol" type TestChild interface { - Rule + Node testChild() } type Test struct { - ListRule + ListNode } func NewTest() *Test { rule := &Test{} - rule.initBaseRule(symbol.TEST) + rule.initBaseNode(symbol.TEST) return rule } func (rule *Test) testlistStarExpressionChild() {} func (rule *Test) testChild() {} -func (rule *Test) Append(n TestChild) { rule.ListRule.Append(n) } +func (rule *Test) Append(n TestChild) { rule.ListNode.Append(n) } type OrTestChild interface { - Rule + Node orTestChild() } type OrTest struct { - ListRule + ListNode } func NewOrTest() *OrTest { rule := &OrTest{} - rule.initBaseRule(symbol.OR_TEST) + rule.initBaseNode(symbol.OR_TEST) return rule } func (rule *OrTest) testChild() {} -func (rule *OrTest) Append(n OrTestChild) { rule.ListRule.Append(n) } +func (rule *OrTest) Append(n OrTestChild) { rule.ListNode.Append(n) } type AndTestChild interface { - Rule + Node andTestChild() } type AndTest struct { - ListRule + ListNode } func NewAndTest() *AndTest { rule := &AndTest{} - rule.initBaseRule(symbol.AND_TEST) + rule.initBaseNode(symbol.AND_TEST) return rule } func (rule *AndTest) orTestChild() {} -func (rule *AndTest) Append(n AndTestChild) { rule.ListRule.Append(n) } +func (rule *AndTest) Append(n AndTestChild) { rule.ListNode.Append(n) } type NotTestChild interface { - Rule + Node notTestChild() } type NotTest struct { - ParentRule + ParentNode } func NewNotTest() *NotTest { rule := &NotTest{} - rule.initBaseRule(symbol.NOT_TEST) + rule.initBaseNode(symbol.NOT_TEST) return rule } func (rule *NotTest) notTestChild() {} func (rule *NotTest) andTestChild() {} -func (rule *NotTest) SetChild(n NotTestChild) { rule.ParentRule.SetChild(n) } +func (rule *NotTest) SetChild(n NotTestChild) { rule.ParentNode.SetChild(n) } diff --git a/main.go b/main.go index 8d594d5..4719de1 100644 --- a/main.go +++ b/main.go @@ -4,7 +4,7 @@ import ( "fmt" "os" - "github.com/brettlangdon/gython/parser" + "github.com/brettlangdon/gython/grammar" "github.com/brettlangdon/gython/scanner" "github.com/brettlangdon/gython/token" ) @@ -22,13 +22,14 @@ func tokenize() { } } -func parse() { - gp := parser.NewGrammarParser(os.Stdin) +func parseGrammar() { + tokenizer := scanner.NewScanner(os.Stdin) + gp := grammar.NewGrammarParser(tokenizer) root := gp.Parse() fmt.Println(gp) fmt.Println(root.Repr()) } func main() { - parse() + parseGrammar() }
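
After this reorganization the parser lives in the grammar package and is constructed from an already-built *scanner.Scanner rather than an io.Reader, which lets the caller own and configure the tokenizer (main.go above shows exactly this with os.Stdin). Below is a minimal usage sketch of the new API; it is not part of the patch. scanner.NewScanner accepting an io.Reader is taken from the removed parser/grammar.go code, and the input string is purely illustrative.

package main

import (
	"fmt"
	"strings"

	"github.com/brettlangdon/gython/grammar"
	"github.com/brettlangdon/gython/scanner"
)

func main() {
	// The scanner wraps any io.Reader (the patch itself only exercises os.Stdin).
	tokenizer := scanner.NewScanner(strings.NewReader("1 + 2 * 3\n"))

	// New constructor signature: the parser takes the scanner, not a reader.
	gp := grammar.NewGrammarParser(tokenizer)

	root := gp.Parse() // *grammar.FileInput; nil when parsing fails
	if root == nil {
		for _, e := range gp.Errors {
			fmt.Println("parse error:", e.Message)
		}
		return
	}

	// Repr() returns a nested []interface{} describing the node tree.
	fmt.Println(root.Repr())
}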