diff --git a/grammar/expressions.go b/grammar/expressions.go
index aeb3884..3da9946 100644
--- a/grammar/expressions.go
+++ b/grammar/expressions.go
@@ -1,251 +1,253 @@
 package grammar
 
-type TestlistStarExpressionChildNode interface {
-    Node
+import "github.com/brettlangdon/gython/symbol"
+
+type TestlistStarExpressionChild interface {
+    Rule
     testlistStarExpressionChild()
 }
 
 type TestlistStarExpression struct {
-    ParentNode
+    ParentRule
 }
 
 func NewTestListStarExpression() *TestlistStarExpression {
-    node := &TestlistStarExpression{}
-    node.initBaseNode(TESTLIST_STAR_EXPR)
-    return node
+    rule := &TestlistStarExpression{}
+    rule.initBaseRule(symbol.TESTLIST_STAR_EXPR)
+    return rule
 }
-func (node *TestlistStarExpression) expressionStatementChild() {}
-func (node *TestlistStarExpression) SetChild(n TestlistStarExpressionChildNode) {
-    node.ParentNode.SetChild(n)
+func (rule *TestlistStarExpression) expressionStatementChild() {}
+func (rule *TestlistStarExpression) SetChild(n TestlistStarExpressionChild) {
+    rule.ParentRule.SetChild(n)
 }
 
-type ComparisonChildNode interface {
-    Node
+type ComparisonChild interface {
+    Rule
     comparisonChild()
 }
 
 type Comparison struct {
-    ListNode
+    ListRule
 }
 
 func NewComparison() *Comparison {
-    node := &Comparison{}
-    node.initBaseNode(COMPARISON)
-    node.initListNode()
-    return node
+    rule := &Comparison{}
+    rule.initBaseRule(symbol.COMPARISON)
+    rule.initListRule()
+    return rule
 }
-func (node *Comparison) notTestChild() {}
-func (node *Comparison) Append(n ComparisonChildNode) { node.ListNode.Append(n) }
+func (rule *Comparison) notTestChild() {}
+func (rule *Comparison) Append(n ComparisonChild) { rule.ListRule.Append(n) }
 
-type ExpressionChildNode interface {
-    Node
+type ExpressionChild interface {
+    Rule
     expressionChild()
 }
 
 type Expression struct {
-    ListNode
+    ListRule
 }
 
 func NewExpression() *Expression {
-    node := &Expression{}
-    node.initBaseNode(EXPR)
-    node.initListNode()
-    return node
+    rule := &Expression{}
+    rule.initBaseRule(symbol.EXPR)
+    rule.initListRule()
+    return rule
 }
-func (node *Expression) comparisonChild() {}
-func (node *Expression) Append(n ExpressionChildNode) { node.ListNode.Append(n) }
+func (rule *Expression) comparisonChild() {}
+func (rule *Expression) Append(n ExpressionChild) { rule.ListRule.Append(n) }
 
-type XorExpressionChildNode interface {
-    Node
+type XorExpressionChild interface {
+    Rule
     xorExpressionChild()
 }
 
 type XorExpression struct {
-    ListNode
+    ListRule
 }
 
 func NewXorExpression() *XorExpression {
-    node := &XorExpression{}
-    node.initBaseNode(XOR_EXPR)
-    node.initListNode()
-    return node
+    rule := &XorExpression{}
+    rule.initBaseRule(symbol.XOR_EXPR)
+    rule.initListRule()
+    return rule
 }
-func (node *XorExpression) expressionChild() {}
-func (node *XorExpression) Append(n XorExpressionChildNode) { node.ListNode.Append(n) }
+func (rule *XorExpression) expressionChild() {}
+func (rule *XorExpression) Append(n XorExpressionChild) { rule.ListRule.Append(n) }
 
-type AndExpressionChildNode interface {
-    Node
+type AndExpressionChild interface {
+    Rule
     andExpressionChild()
 }
 
 type AndExpression struct {
-    ListNode
+    ListRule
 }
 
 func NewAndExpression() *AndExpression {
-    node := &AndExpression{}
-    node.initBaseNode(AND_EXPR)
-    node.initListNode()
-    return node
+    rule := &AndExpression{}
+    rule.initBaseRule(symbol.AND_EXPR)
+    rule.initListRule()
+    return rule
 }
-func (node *AndExpression) xorExpressionChild() {}
-func (node *AndExpression) Append(n AndExpressionChildNode) { node.ListNode.Append(n) }
+func (rule *AndExpression) xorExpressionChild() {}
+func (rule *AndExpression) Append(n AndExpressionChild) { rule.ListRule.Append(n) }
 
-type ShiftExpressionChildNode interface {
-    Node
+type ShiftExpressionChild interface {
+    Rule
     shiftExpressionChild()
 }
 
 type ShiftExpression struct {
-    ListNode
+    ListRule
 }
 
 func NewShiftExpression() *ShiftExpression {
-    node := &ShiftExpression{}
-    node.initBaseNode(SHIFT_EXPR)
-    node.initListNode()
-    return node
+    rule := &ShiftExpression{}
+    rule.initBaseRule(symbol.SHIFT_EXPR)
+    rule.initListRule()
+    return rule
 }
-func (node *ShiftExpression) andExpressionChild() {}
-func (node *ShiftExpression) Append(n ShiftExpressionChildNode) { node.ListNode.Append(n) }
+func (rule *ShiftExpression) andExpressionChild() {}
+func (rule *ShiftExpression) Append(n ShiftExpressionChild) { rule.ListRule.Append(n) }
 
-type ArithmeticExpressionChildNode interface {
-    Node
+type ArithmeticExpressionChild interface {
+    Rule
     arithmeticExpressionChild()
 }
 
 type ArithmeticExpression struct {
-    ListNode
+    ListRule
 }
 
 func NewArithmeticExpression() *ArithmeticExpression {
-    node := &ArithmeticExpression{}
-    node.initBaseNode(ARITH_EXPR)
-    node.initListNode()
-    return node
+    rule := &ArithmeticExpression{}
+    rule.initBaseRule(symbol.ARITH_EXPR)
+    rule.initListRule()
+    return rule
 }
-func (node *ArithmeticExpression) shiftExpressionChild() {}
-func (node *ArithmeticExpression) Append(n ArithmeticExpressionChildNode) { node.ListNode.Append(n) }
+func (rule *ArithmeticExpression) shiftExpressionChild() {}
+func (rule *ArithmeticExpression) Append(n ArithmeticExpressionChild) { rule.ListRule.Append(n) }
 
-type TermChildNode interface {
-    Node
+type TermChild interface {
+    Rule
     termChild()
 }
 
 type Term struct {
-    ListNode
+    ListRule
 }
 
 func NewTerm() *Term {
-    node := &Term{}
-    node.initBaseNode(TERM)
-    node.initListNode()
-    return node
+    rule := &Term{}
+    rule.initBaseRule(symbol.TERM)
+    rule.initListRule()
+    return rule
 }
-func (node *Term) arithmeticExpressionChild() {}
-func (node *Term) Append(n TermChildNode) { node.ListNode.Append(n) }
+func (rule *Term) arithmeticExpressionChild() {}
+func (rule *Term) Append(n TermChild) { rule.ListRule.Append(n) }
 
-type FactorChildNode interface {
-    Node
+type FactorChild interface {
+    Rule
     factorChild()
 }
 
 type Factor struct {
-    ListNode
+    ListRule
 }
 
 func NewFactor() *Factor {
-    node := &Factor{}
-    node.initBaseNode(FACTOR)
-    node.initListNode()
-    return node
+    rule := &Factor{}
+    rule.initBaseRule(symbol.FACTOR)
+    rule.initListRule()
+    return rule
 }
-func (node *Factor) factorChild() {}
-func (node *Factor) powerChild() {}
-func (node *Factor) termChild() {}
-func (node *Factor) Append(n FactorChildNode) { node.ListNode.Append(n) }
+func (rule *Factor) factorChild() {}
+func (rule *Factor) powerChild() {}
+func (rule *Factor) termChild() {}
+func (rule *Factor) Append(n FactorChild) { rule.ListRule.Append(n) }
 
-type PowerChildNode interface {
-    Node
+type PowerChild interface {
+    Rule
     powerChild()
 }
 
 type Power struct {
-    ListNode
+    ListRule
 }
 
 func NewPower() *Power {
-    node := &Power{}
-    node.initBaseNode(POWER)
-    node.initListNode()
-    return node
+    rule := &Power{}
+    rule.initBaseRule(symbol.POWER)
+    rule.initListRule()
+    return rule
 }
-func (node *Power) factorChild() {}
-func (node *Power) Append(n PowerChildNode) { node.ListNode.Append(n) }
+func (rule *Power) factorChild() {}
+func (rule *Power) Append(n PowerChild) { rule.ListRule.Append(n) }
 
-type AtomExpressionChildNode interface {
-    Node
+type AtomExpressionChild interface {
+    Rule
     atomExpressionChild()
 }
 
 type AtomExpression struct {
-    ListNode
+    ListRule
 }
 
 func NewAtomExpression() *AtomExpression {
-    node := &AtomExpression{}
-    node.initBaseNode(ATOM_EXPR)
-    node.initListNode()
-    return node
+    rule := &AtomExpression{}
+    rule.initBaseRule(symbol.ATOM_EXPR)
+    rule.initListRule()
+    return rule
 }
-func (node *AtomExpression) powerChild() {}
-func (node *AtomExpression) Append(n AtomExpressionChildNode) { node.ListNode.Append(n) }
+func (rule *AtomExpression) powerChild() {}
+func (rule *AtomExpression) Append(n AtomExpressionChild) { rule.ListRule.Append(n) }
 
-type AtomChildNode interface {
-    Node
+type AtomChild interface {
+    Rule
     atomChild()
 }
 
 type Atom struct {
-    ListNode
+    ListRule
 }
 
 func NewAtom() *Atom {
-    node := &Atom{}
-    node.initBaseNode(ATOM)
-    node.initListNode()
-    return node
+    rule := &Atom{}
+    rule.initBaseRule(symbol.ATOM)
+    rule.initListRule()
+    return rule
 }
-func (node *Atom) atomExpressionChild() {}
-func (node *Atom) Append(n AtomChildNode) { node.ListNode.Append(n) }
+func (rule *Atom) atomExpressionChild() {}
+func (rule *Atom) Append(n AtomChild) { rule.ListRule.Append(n) }
 
-type TrailerChildNode interface {
-    Node
+type TrailerChild interface {
+    Rule
     trailerChild()
 }
 
 type Trailer struct {
-    ListNode
+    ListRule
 }
 
 func NewTrailer() *Trailer {
-    node := &Trailer{}
-    node.initBaseNode(TRAILER)
-    node.initListNode()
-    return node
+    rule := &Trailer{}
+    rule.initBaseRule(symbol.TRAILER)
+    rule.initListRule()
+    return rule
 }
-func (node *Trailer) atomExpressionChild() {}
-func (node *Trailer) Append(n TrailerChildNode) { node.ListNode.Append(n) }
+func (rule *Trailer) atomExpressionChild() {}
+func (rule *Trailer) Append(n TrailerChild) { rule.ListRule.Append(n) }
diff --git a/grammar/nodeid.go b/grammar/nodeid.go
deleted file mode 100644
index 0b22307..0000000
--- a/grammar/nodeid.go
+++ /dev/null
@@ -1,91 +0,0 @@
-package grammar
-
-type NodeID int
-
-const (
-    SINGLE_INPUT NodeID = 256
-    FILE_INPUT NodeID = 257
-    EVAL_INPUT NodeID = 258
-    DECORATOR NodeID = 259
-    DECORATORS NodeID = 260
-    DECORATED NodeID = 261
-    ASYNC_FUNCDEF NodeID = 262
-    FUNCDEF NodeID = 263
-    PARAMETERS NodeID = 264
-    TYPEDARGSLIST NodeID = 265
-    TFPDEF NodeID = 266
-    VARARGSLIST NodeID = 267
-    VFPDEF NodeID = 268
-    STMT NodeID = 269
-    SIMPLE_STMT NodeID = 270
-    SMALL_STMT NodeID = 271
-    EXPR_STMT NodeID = 272
-    TESTLIST_STAR_EXPR NodeID = 273
-    AUGASSIGN NodeID = 274
-    DEL_STMT NodeID = 275
-    PASS_STMT NodeID = 276
-    FLOW_STMT NodeID = 277
-    BREAK_STMT NodeID = 278
-    CONTINUE_STMT NodeID = 279
-    RETURN_STMT NodeID = 280
-    YIELD_STMT NodeID = 281
-    RAISE_STMT NodeID = 282
-    IMPORT_STMT NodeID = 283
-    IMPORT_NAME NodeID = 284
-    IMPORT_FROM NodeID = 285
-    IMPORT_AS_NAME NodeID = 286
-    DOTTED_AS_NAME NodeID = 287
-    IMPORT_AS_NAMES NodeID = 288
-    DOTTED_AS_NAMES NodeID = 289
-    DOTTED_NAME NodeID = 290
-    GLOBAL_STMT NodeID = 291
-    NONLOCAL_STMT NodeID = 292
-    ASSERT_STMT NodeID = 293
-    COMPOUND_STMT NodeID = 294
-    ASYNC_STMT NodeID = 295
-    IF_STMT NodeID = 296
-    WHILE_STMT NodeID = 297
-    FOR_STMT NodeID = 298
-    TRY_STMT NodeID = 299
-    WITH_STMT NodeID = 300
-    WITH_ITEM NodeID = 301
-    EXCEPT_CLAUSE NodeID = 302
-    SUITE NodeID = 303
-    TEST NodeID = 304
-    TEST_NOCOND NodeID = 305
-    LAMBDEF NodeID = 306
-    LAMBDEF_NOCOND NodeID = 307
-    OR_TEST NodeID = 308
-    AND_TEST NodeID = 309
-    NOT_TEST NodeID = 310
-    COMPARISON NodeID = 311
-    COMP_OP NodeID = 312
-    STAR_EXPR NodeID = 313
-    EXPR NodeID = 314
-    XOR_EXPR NodeID = 315
-    AND_EXPR NodeID = 316
-    SHIFT_EXPR NodeID = 317
-    ARITH_EXPR NodeID = 318
-    TERM NodeID = 319
-    FACTOR NodeID = 320
-    POWER NodeID = 321
-    ATOM_EXPR NodeID = 322
-    ATOM NodeID = 323
-    TESTLIST_COMP NodeID = 324
-    TRAILER NodeID = 325
-    SUBSCRIPTLIST NodeID = 326
-    SUBSCRIPT NodeID = 327
-    SLICEOP NodeID = 328
-    EXPRLIST NodeID = 329
-    TESTLIST NodeID = 330
-    DICTORSETMAKER NodeID = 331
-    CLASSDEF NodeID = 332
-    ARGLIST NodeID = 333
-    ARGUMENT NodeID = 334
-    COMP_ITER NodeID = 335
-    COMP_FOR NodeID = 336
-    COMP_IF NodeID = 337
-    ENCODING_DECL NodeID = 338
-    YIELD_EXPR NodeID = 339
-    YIELD_ARG NodeID = 340
-)
diff --git a/grammar/nodename.go b/grammar/nodename.go
deleted file mode 100644
index 649471f..0000000
--- a/grammar/nodename.go
+++ /dev/null
@@ -1,89 +0,0 @@
-package grammar
-
-var NodeNames = [...]string{
-    SINGLE_INPUT: "SINGLE_INPUT",
-    FILE_INPUT: "FILE_INPUT",
-    EVAL_INPUT: "EVAL_INPUT",
-    DECORATOR: "DECORATOR",
-    DECORATORS: "DECORATORS",
-    DECORATED: "DECORATED",
-    ASYNC_FUNCDEF: "ASYNC_FUNCDEF",
-    FUNCDEF: "FUNCDEF",
-    PARAMETERS: "PARAMETERS",
-    TYPEDARGSLIST: "TYPEDARGSLIST",
-    TFPDEF: "TFPDEF",
-    VARARGSLIST: "VARARGSLIST",
-    VFPDEF: "VFPDEF",
-    STMT: "STMT",
-    SIMPLE_STMT: "SIMPLE_STMT",
-    SMALL_STMT: "SMALL_STMT",
-    EXPR_STMT: "EXPR_STMT",
-    TESTLIST_STAR_EXPR: "TESTLIST_STAR_EXPR",
-    AUGASSIGN: "AUGASSIGN",
-    DEL_STMT: "DEL_STMT",
-    PASS_STMT: "PASS_STMT",
-    FLOW_STMT: "FLOW_STMT",
-    BREAK_STMT: "BREAK_STMT",
-    CONTINUE_STMT: "CONTINUE_STMT",
-    RETURN_STMT: "RETURN_STMT",
-    YIELD_STMT: "YIELD_STMT",
-    RAISE_STMT: "RAISE_STMT",
-    IMPORT_STMT: "IMPORT_STMT",
-    IMPORT_NAME: "IMPORT_NAME",
-    IMPORT_FROM: "IMPORT_FROM",
-    IMPORT_AS_NAME: "IMPORT_AS_NAME",
-    DOTTED_AS_NAME: "DOTTED_AS_NAME",
-    IMPORT_AS_NAMES: "IMPORT_AS_NAMES",
-    DOTTED_AS_NAMES: "DOTTED_AS_NAMES",
-    DOTTED_NAME: "DOTTED_NAME",
-    GLOBAL_STMT: "GLOBAL_STMT",
-    NONLOCAL_STMT: "NONLOCAL_STMT",
-    ASSERT_STMT: "ASSERT_STMT",
-    COMPOUND_STMT: "COMPOUND_STMT",
-    ASYNC_STMT: "ASYNC_STMT",
-    IF_STMT: "IF_STMT",
-    WHILE_STMT: "WHILE_STMT",
-    FOR_STMT: "FOR_STMT",
-    TRY_STMT: "TRY_STMT",
-    WITH_STMT: "WITH_STMT",
-    WITH_ITEM: "WITH_ITEM",
-    EXCEPT_CLAUSE: "EXCEPT_CLAUSE",
-    SUITE: "SUITE",
-    TEST: "TEST",
-    TEST_NOCOND: "TEST_NOCOND",
-    LAMBDEF: "LAMBDEF",
-    LAMBDEF_NOCOND: "LAMBDEF_NOCOND",
-    OR_TEST: "OR_TEST",
-    AND_TEST: "AND_TEST",
-    NOT_TEST: "NOT_TEST",
-    COMPARISON: "COMPARISON",
-    COMP_OP: "COMP_OP",
-    STAR_EXPR: "STAR_EXPR",
-    EXPR: "EXPR",
-    XOR_EXPR: "XOR_EXPR",
-    AND_EXPR: "AND_EXPR",
-    SHIFT_EXPR: "SHIFT_EXPR",
-    ARITH_EXPR: "ARITH_EXPR",
-    TERM: "TERM",
-    FACTOR: "FACTOR",
-    POWER: "POWER",
-    ATOM_EXPR: "ATOM_EXPR",
-    ATOM: "ATOM",
-    TESTLIST_COMP: "TESTLIST_COMP",
-    TRAILER: "TRAILER",
-    SUBSCRIPTLIST: "SUBSCRIPTLIST",
-    SUBSCRIPT: "SUBSCRIPT",
-    SLICEOP: "SLICEOP",
-    EXPRLIST: "EXPRLIST",
-    TESTLIST: "TESTLIST",
-    DICTORSETMAKER: "DICTORSETMAKER",
-    CLASSDEF: "CLASSDEF",
-    ARGLIST: "ARGLIST",
-    ARGUMENT: "ARGUMENT",
-    COMP_ITER: "COMP_ITER",
-    COMP_FOR: "COMP_FOR",
-    COMP_IF: "COMP_IF",
-    ENCODING_DECL: "ENCODING_DECL",
-    YIELD_EXPR: "YIELD_EXPR",
-    YIELD_ARG: "YIELD_ARG",
-}
diff --git a/grammar/nodes.go b/grammar/nodes.go
deleted file mode 100644
index c12d4ff..0000000
--- a/grammar/nodes.go
+++ /dev/null
@@ -1,81 +0,0 @@
-package grammar
-
-import (
-    "fmt"
-
-    "github.com/brettlangdon/gython/token"
-)
-
-type Node interface {
-    Name() string
-    Repr() []interface{}
-}
-
-type TokenNode struct {
-    Token *token.Token
-}
-
-func NewTokenNode(tok *token.Token) *TokenNode {
-    return &TokenNode{
-        Token: tok,
-    }
-}
-func (node *TokenNode) atomChild() {}
-func (node *TokenNode) atomExpressionChild() {}
-func (node *TokenNode) comparisonChild() {}
-func (node *TokenNode) expressionStatementChild() {}
-func (node *TokenNode) factorChild() {}
-func (node *TokenNode) fileInputChild() {}
-func (node *TokenNode) shiftExpressionChild() {}
-func (node *TokenNode) simpleStatementChild() {}
-func (node *TokenNode) trailerChild() {}
-func (node *TokenNode) Name() string { return token.TokenNames[node.Token.ID] }
-func (node *TokenNode) Repr() []interface{} {
-    parts := make([]interface{}, 0)
-    parts = append(parts, node.Name())
-    literal := fmt.Sprintf("%#v", node.Token.Literal)
-    return append(parts, literal)
-}
-
-type BaseNode struct {
-    ID NodeID
-    child Node
-}
-
-func (node *BaseNode) initBaseNode(id NodeID) { node.ID = id }
-func (node *BaseNode) Name() string { return NodeNames[node.ID] }
-func (node *BaseNode) Repr() (parts []interface{}) { return append(parts, node.Name()) }
-
-type ParentNode struct {
-    BaseNode
-    child Node
-}
-
-func (node *ParentNode) SetChild(n Node) { node.child = n }
-func (node *ParentNode) Child() Node { return node.child }
-func (node *ParentNode) Repr() (parts []interface{}) {
-    parts = node.BaseNode.Repr()
-    child := node.Child()
-    if child != nil {
-        parts = append(parts, child.Repr())
-    }
-    return parts
-}
-
-type ListNode struct {
-    BaseNode
-    children []Node
-}
-
-func (node *ListNode) initListNode() { node.children = make([]Node, 0) }
-func (node *ListNode) Length() int { return len(node.children) }
-func (node *ListNode) Children() []Node { return node.children }
-func (node *ListNode) Append(n Node) { node.children = append(node.children, n) }
-func (node *ListNode) Repr() (parts []interface{}) {
-    parts = node.BaseNode.Repr()
-    children := node.Children()
-    for _, child := range children {
-        parts = append(parts, child.Repr())
-    }
-    return parts
-}
diff --git a/grammar/rules.go b/grammar/rules.go
new file mode 100644
index 0000000..8e4c05c
--- /dev/null
+++ b/grammar/rules.go
@@ -0,0 +1,82 @@
+package grammar
+
+import (
+    "fmt"
+
+    "github.com/brettlangdon/gython/symbol"
+    "github.com/brettlangdon/gython/token"
+)
+
+type Rule interface {
+    Name() string
+    Repr() []interface{}
+}
+
+type TokenRule struct {
+    Token *token.Token
+}
+
+func NewTokenRule(tok *token.Token) *TokenRule {
+    return &TokenRule{
+        Token: tok,
+    }
+}
+func (rule *TokenRule) atomChild() {}
+func (rule *TokenRule) atomExpressionChild() {}
+func (rule *TokenRule) comparisonChild() {}
+func (rule *TokenRule) expressionStatementChild() {}
+func (rule *TokenRule) factorChild() {}
+func (rule *TokenRule) fileInputChild() {}
+func (rule *TokenRule) shiftExpressionChild() {}
+func (rule *TokenRule) simpleStatementChild() {}
+func (rule *TokenRule) trailerChild() {}
+func (rule *TokenRule) Name() string { return token.TokenNames[rule.Token.ID] }
+func (rule *TokenRule) Repr() []interface{} {
+    parts := make([]interface{}, 0)
+    parts = append(parts, rule.Name())
+    literal := fmt.Sprintf("%#v", rule.Token.Literal)
+    return append(parts, literal)
+}
+
+type BaseRule struct {
+    ID symbol.SymbolID
+    child Rule
+}
+
+func (rule *BaseRule) initBaseRule(id symbol.SymbolID) { rule.ID = id }
+func (rule *BaseRule) Name() string { return symbol.SymbolNames[rule.ID] }
+func (rule *BaseRule) Repr() (parts []interface{}) { return append(parts, rule.Name()) }
+
+type ParentRule struct {
+    BaseRule
+    child Rule
+}
+
+func (rule *ParentRule) SetChild(n Rule) { rule.child = n }
+func (rule *ParentRule) Child() Rule { return rule.child }
+func (rule *ParentRule) Repr() (parts []interface{}) {
+    parts = rule.BaseRule.Repr()
+    child := rule.Child()
+    if child != nil {
+        parts = append(parts, child.Repr())
+    }
+    return parts
+}
+
+type ListRule struct {
+    BaseRule
+    children []Rule
+}
+
+func (rule *ListRule) initListRule() { rule.children = make([]Rule, 0) }
+func (rule *ListRule) Length() int { return len(rule.children) }
+func (rule *ListRule) Children() []Rule { return rule.children }
+func (rule *ListRule) Append(n Rule) { rule.children = append(rule.children, n) }
+func (rule *ListRule) Repr() (parts []interface{}) {
+    parts = rule.BaseRule.Repr()
+    children := rule.Children()
+    for _, child := range children {
+        parts = append(parts, child.Repr())
+    }
+    return parts
+}
diff --git a/grammar/start.go b/grammar/start.go
index 3995e24..f6f1d1e 100644
--- a/grammar/start.go
+++ b/grammar/start.go
@@ -1,19 +1,21 @@
 package grammar
 
-type FileInputChildNode interface {
-    Node
+import "github.com/brettlangdon/gython/symbol"
+
+type FileInputChild interface {
+    Rule
     fileInputChild()
 }
 
 type FileInput struct {
-    ListNode
+    ListRule
 }
 
 func NewFileInput() *FileInput {
-    node := &FileInput{}
-    node.initBaseNode(FILE_INPUT)
-    node.initListNode()
-    return node
+    rule := &FileInput{}
+    rule.initBaseRule(symbol.FILE_INPUT)
+    rule.initListRule()
+    return rule
 }
-func (node *FileInput) Append(n FileInputChildNode) { node.ListNode.Append(n) }
+func (rule *FileInput) Append(n FileInputChild) { rule.ListRule.Append(n) }
diff --git a/grammar/statements.go b/grammar/statements.go
index 2e70023..0398ea0 100644
--- a/grammar/statements.go
+++ b/grammar/statements.go
@@ -1,87 +1,89 @@
 package grammar
 
-type StatementChildNode interface {
-    Node
-    stmtChildNode()
+import "github.com/brettlangdon/gython/symbol"
+
+type StatementChild interface {
+    Rule
+    stmtChild()
 }
 
 type Statement struct {
-    ParentNode
+    ParentRule
 }
 
 func NewStatement() *Statement {
-    node := &Statement{}
-    node.initBaseNode(STMT)
-    return node
+    rule := &Statement{}
+    rule.initBaseRule(symbol.STMT)
+    return rule
 }
-func (node *Statement) fileInputChild() {}
-func (node *Statement) SetChild(n StatementChildNode) { node.ParentNode.SetChild(n) }
+func (rule *Statement) fileInputChild() {}
+func (rule *Statement) SetChild(n StatementChild) { rule.ParentRule.SetChild(n) }
 
-type SimpleStatementChildNode interface {
-    Node
+type SimpleStatementChild interface {
+    Rule
     simpleStatementChild()
 }
 
 type SimpleStatement struct {
-    ListNode
+    ListRule
 }
 
 func NewSimpleStatement() *SimpleStatement {
-    node := &SimpleStatement{}
-    node.initBaseNode(SIMPLE_STMT)
-    node.initListNode()
-    return node
+    rule := &SimpleStatement{}
+    rule.initBaseRule(symbol.SIMPLE_STMT)
+    rule.initListRule()
+    return rule
 }
-func (node *SimpleStatement) stmtChildNode() {}
-func (node *SimpleStatement) Append(n SimpleStatementChildNode) { node.ListNode.Append(n) }
+func (rule *SimpleStatement) stmtChild() {}
+func (rule *SimpleStatement) Append(n SimpleStatementChild) { rule.ListRule.Append(n) }
 
 type CompoundStatement struct {
-    BaseNode
+    BaseRule
 }
 
 func NewCompoundStatement() *CompoundStatement {
-    node := &CompoundStatement{}
-    node.initBaseNode(COMPOUND_STMT)
-    return node
+    rule := &CompoundStatement{}
+    rule.initBaseRule(symbol.COMPOUND_STMT)
+    return rule
 }
-func (node *CompoundStatement) stmtChildNode() {}
+func (rule *CompoundStatement) stmtChild() {}
 
-type SmallStatementChildNode interface {
-    Node
+type SmallStatementChild interface {
+    Rule
     smallStmtChild()
 }
 
 type SmallStatement struct {
-    ParentNode
+    ParentRule
 }
 
 func NewSmallStatement() *SmallStatement {
-    node := &SmallStatement{}
-    node.initBaseNode(SMALL_STMT)
-    return node
+    rule := &SmallStatement{}
+    rule.initBaseRule(symbol.SMALL_STMT)
+    return rule
 }
-func (node *SmallStatement) simpleStatementChild() {}
-func (node *SmallStatement) SetChild(n SmallStatementChildNode) { node.ParentNode.SetChild(n) }
+func (rule *SmallStatement) simpleStatementChild() {}
+func (rule *SmallStatement) SetChild(n SmallStatementChild) { rule.ParentRule.SetChild(n) }
 
-type ExpressionStatementChildNode interface {
-    Node
+type ExpressionStatementChild interface {
+    Rule
     expressionStatementChild()
 }
 
 type ExpressionStatement struct {
-    ListNode
+    ListRule
     Expression *TestlistStarExpression
 }
 
 func NewExpressionStatement() *ExpressionStatement {
-    node := &ExpressionStatement{}
-    node.initBaseNode(EXPR_STMT)
-    node.initListNode()
-    return node
+    rule := &ExpressionStatement{}
+    rule.initBaseRule(symbol.EXPR_STMT)
+    rule.initListRule()
+    return rule
 }
-func (node *ExpressionStatement) smallStmtChild() {}
-func (node *ExpressionStatement) Append(n ExpressionStatementChildNode) { node.ListNode.Append(n) }
+func (rule *ExpressionStatement) smallStmtChild() {}
+func (rule *ExpressionStatement) Append(n ExpressionStatementChild) { rule.ListRule.Append(n) }
diff --git a/grammar/tests.go b/grammar/tests.go
index 83e0e70..0f51bd7 100644
--- a/grammar/tests.go
+++ b/grammar/tests.go
@@ -1,75 +1,77 @@
 package grammar
 
-type TestChildNode interface {
-    Node
+import "github.com/brettlangdon/gython/symbol"
+
+type TestChild interface {
+    Rule
     testChild()
 }
 
 type Test struct {
-    ListNode
+    ListRule
 }
 
 func NewTest() *Test {
-    node := &Test{}
-    node.initBaseNode(TEST)
-    return node
+    rule := &Test{}
+    rule.initBaseRule(symbol.TEST)
+    return rule
 }
-func (node *Test) testlistStarExpressionChild() {}
-func (node *Test) testChild() {}
-func (node *Test) Append(n TestChildNode) { node.ListNode.Append(n) }
+func (rule *Test) testlistStarExpressionChild() {}
+func (rule *Test) testChild() {}
+func (rule *Test) Append(n TestChild) { rule.ListRule.Append(n) }
 
-type OrTestChildNode interface {
-    Node
+type OrTestChild interface {
+    Rule
     orTestChild()
 }
 
 type OrTest struct {
-    ListNode
+    ListRule
 }
 
 func NewOrTest() *OrTest {
-    node := &OrTest{}
-    node.initBaseNode(OR_TEST)
-    return node
+    rule := &OrTest{}
+    rule.initBaseRule(symbol.OR_TEST)
+    return rule
 }
-func (node *OrTest) testChild() {}
-func (node *OrTest) Append(n OrTestChildNode) { node.ListNode.Append(n) }
+func (rule *OrTest) testChild() {}
+func (rule *OrTest) Append(n OrTestChild) { rule.ListRule.Append(n) }
 
-type AndTestChildNode interface {
-    Node
+type AndTestChild interface {
+    Rule
     andTestChild()
 }
 
 type AndTest struct {
-    ListNode
+    ListRule
 }
 
 func NewAndTest() *AndTest {
-    node := &AndTest{}
-    node.initBaseNode(AND_TEST)
-    return node
+    rule := &AndTest{}
+    rule.initBaseRule(symbol.AND_TEST)
+    return rule
 }
-func (node *AndTest) orTestChild() {}
-func (node *AndTest) Append(n AndTestChildNode) { node.ListNode.Append(n) }
+func (rule *AndTest) orTestChild() {}
+func (rule *AndTest) Append(n AndTestChild) { rule.ListRule.Append(n) }
 
 type NotTestChild interface {
-    Node
+    Rule
     notTestChild()
 }
 
 type NotTest struct {
-    ParentNode
+    ParentRule
 }
 
 func NewNotTest() *NotTest {
-    node := &NotTest{}
-    node.initBaseNode(NOT_TEST)
-    return node
+    rule := &NotTest{}
+    rule.initBaseRule(symbol.NOT_TEST)
+    return rule
 }
-func (node *NotTest) notTestChild() {}
-func (node *NotTest) andTestChild() {}
-func (node *NotTest) SetChild(n NotTestChild) { node.ParentNode.SetChild(n) }
+func (rule *NotTest) notTestChild() {}
+func (rule *NotTest) andTestChild() {}
+func (rule *NotTest) SetChild(n NotTestChild) { rule.ParentRule.SetChild(n) }
diff --git a/main.go b/main.go
index 3a3082c..8d594d5 100644
--- a/main.go
+++ b/main.go
@@ -23,8 +23,9 @@ func tokenize() {
 }
 
 func parse() {
-    root, p := parser.ParseGrammar(os.Stdin)
-    fmt.Println(p)
+    gp := parser.NewGrammarParser(os.Stdin)
+    root := gp.Parse()
+    fmt.Println(gp)
     fmt.Println(root.Repr())
 }
 
diff --git a/parser/parser.go b/parser/grammar.go
similarity index 78%
rename from parser/parser.go
rename to parser/grammar.go
index d555226..9bbdf52 100644
--- a/parser/parser.go
+++ b/parser/grammar.go
@@ -9,13 +9,13 @@ import (
     "github.com/brettlangdon/gython/token"
 )
 
-type Parser struct {
+type GrammarParser struct {
     Errors      []*Error
     tokenizer   *scanner.Scanner
     tokenBuffer []*token.Token
 }
 
-func (parser *Parser) nextToken() *token.Token {
+func (parser *GrammarParser) nextToken() *token.Token {
     if len(parser.tokenBuffer) > 0 {
         last := len(parser.tokenBuffer) - 1
         next := parser.tokenBuffer[last]
@@ -26,17 +26,17 @@ func (parser *Parser) nextToken() *token.Token {
     return parser.tokenizer.NextToken()
 }
 
-func (parser *Parser) unreadToken(tok *token.Token) {
+func (parser *GrammarParser) unreadToken(tok *token.Token) {
     parser.tokenBuffer = append(parser.tokenBuffer, tok)
 }
 
-func (parser *Parser) addError(msg string) {
+func (parser *GrammarParser) addError(msg string) {
     parser.Errors = append(parser.Errors, &Error{
         Message: msg,
     })
 }
 
-func (parser *Parser) expect(tokID token.TokenID) bool {
+func (parser *GrammarParser) expect(tokID token.TokenID) bool {
     next := parser.nextToken()
     if next.ID != tokID {
         msg := "Unexpected token \"" + next.ID.String() + "\" expected \"" + tokID.String() + "\""
@@ -46,7 +46,7 @@
     return true
 }
 
-func (parser *Parser) expectLiteral(literal string) bool {
+func (parser *GrammarParser) expectLiteral(literal string) bool {
     next := parser.nextToken()
     if !next.IsLiteral(literal) {
         msg := "Unexpected literal \"" + next.Literal + "\" expected \"" + literal + "\""
@@ -57,7 +57,7 @@
 }
 
 // compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt
-func (parser *Parser) parseCompoundStatement() *grammar.CompoundStatement {
+func (parser *GrammarParser) parseCompoundStatement() *grammar.CompoundStatement {
     compoundStmt := grammar.NewCompoundStatement()
     return compoundStmt
 }
@@ -66,28 +66,28 @@ func (parser *Parser) parseCompoundStatement() *grammar.CompoundStatement {
 // atom: ('(' [yield_expr|testlist_comp] ')' |
 //       '[' [testlist_comp] ']' |
 //       '{' [dictorsetmaker] '}' |
 //       NAME | NUMBER | STRING+ | '...' | 'None' | 'True' | 'False')
-func (parser *Parser) parseAtom() *grammar.Atom {
+func (parser *GrammarParser) parseAtom() *grammar.Atom {
     atom := grammar.NewAtom()
     next := parser.nextToken()
     switch next.ID {
     case token.NAME, token.NUMBER, token.ELLIPSIS:
-        atom.Append(grammar.NewTokenNode(next))
+        atom.Append(grammar.NewTokenRule(next))
     case token.STRING:
-        atom.Append(grammar.NewTokenNode(next))
+        atom.Append(grammar.NewTokenRule(next))
         for {
             next := parser.nextToken()
             if next.ID != token.STRING {
                 parser.unreadToken(next)
                 break
             }
-            atom.Append(grammar.NewTokenNode(next))
+            atom.Append(grammar.NewTokenRule(next))
         }
     }
     return atom
 }
 // trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
-func (parser *Parser) parseTrailer() *grammar.Trailer {
+func (parser *GrammarParser) parseTrailer() *grammar.Trailer {
     trailer := grammar.NewTrailer()
     next := parser.nextToken()
     switch next.ID {
@@ -96,20 +96,20 @@ func (parser *Parser) parseTrailer() *grammar.Trailer {
         if next2.ID != token.RPAR {
             return nil
         }
-        trailer.Append(grammar.NewTokenNode(next))
-        trailer.Append(grammar.NewTokenNode(next2))
+        trailer.Append(grammar.NewTokenRule(next))
+        trailer.Append(grammar.NewTokenRule(next2))
     case token.LBRACE:
         next2 := parser.nextToken()
         if next2.ID != token.RBRACE {
             return nil
         }
-        trailer.Append(grammar.NewTokenNode(next))
-        trailer.Append(grammar.NewTokenNode(next2))
+        trailer.Append(grammar.NewTokenRule(next))
+        trailer.Append(grammar.NewTokenRule(next2))
     case token.DOT:
         next2 := parser.nextToken()
         if next2.ID == token.NAME {
-            trailer.Append(grammar.NewTokenNode(next))
-            trailer.Append(grammar.NewTokenNode(next2))
+            trailer.Append(grammar.NewTokenRule(next))
+            trailer.Append(grammar.NewTokenRule(next2))
         } else {
             parser.addError("Expected \"NAME\" instead found \"" + next.ID.String() + "\"")
             return nil
@@ -122,11 +122,11 @@
 }
 
 // atom_expr: [AWAIT] atom trailer*
-func (parser *Parser) parseAtomExpression() *grammar.AtomExpression {
+func (parser *GrammarParser) parseAtomExpression() *grammar.AtomExpression {
     expr := grammar.NewAtomExpression()
     next := parser.nextToken()
     if next.ID == token.AWAIT {
-        expr.Append(grammar.NewTokenNode(next))
+        expr.Append(grammar.NewTokenRule(next))
     } else {
         parser.unreadToken(next)
     }
@@ -148,7 +148,7 @@
 }
 
 // power: atom_expr ['**' factor]
-func (parser *Parser) parsePower() *grammar.Power {
+func (parser *GrammarParser) parsePower() *grammar.Power {
     power := grammar.NewPower()
     atomExpr := parser.parseAtomExpression()
     if atomExpr == nil {
@@ -171,7 +171,7 @@
 }
 
 // factor: ('+'|'-'|'~') factor | power
-func (parser *Parser) parseFactor() *grammar.Factor {
+func (parser *GrammarParser) parseFactor() *grammar.Factor {
     factor := grammar.NewFactor()
     next := parser.nextToken()
     switch next.ID {
@@ -180,7 +180,7 @@
         if node == nil {
             return nil
         }
-        factor.Append(grammar.NewTokenNode(next))
+        factor.Append(grammar.NewTokenRule(next))
         factor.Append(node)
     default:
         parser.unreadToken(next)
@@ -195,7 +195,7 @@
 }
 
 // term: factor (('*'|'@'|'/'|'%'|'//') factor)*
-func (parser *Parser) parseTerm() *grammar.Term {
+func (parser *GrammarParser) parseTerm() *grammar.Term {
     term := grammar.NewTerm()
     factor := parser.parseFactor()
     if factor == nil {
@@ -218,7 +218,7 @@
 }
 
 // arith_expr: term (('+'|'-') term)*
-func (parser *Parser) parseArithmetricExpression() *grammar.ArithmeticExpression {
+func (parser *GrammarParser) parseArithmetricExpression() *grammar.ArithmeticExpression {
     expr := grammar.NewArithmeticExpression()
     term := parser.parseTerm()
     if term == nil {
@@ -241,7 +241,7 @@
 }
 
 // shift_expr: arith_expr (('<<'|'>>') arith_expr)*
-func (parser *Parser) parseShiftExpression() *grammar.ShiftExpression {
+func (parser *GrammarParser) parseShiftExpression() *grammar.ShiftExpression {
     expr := grammar.NewShiftExpression()
     arithExpr := parser.parseArithmetricExpression()
     if arithExpr == nil {
@@ -254,7 +254,7 @@
             parser.unreadToken(next)
             break
         }
-        expr.Append(grammar.NewTokenNode(next))
+        expr.Append(grammar.NewTokenRule(next))
         arithExpr := parser.parseArithmetricExpression()
         if arithExpr == nil {
             return nil
@@ -265,7 +265,7 @@
 }
 
 // and_expr: shift_expr ('&' shift_expr)*
-func (parser *Parser) parseAndExpression() *grammar.AndExpression {
+func (parser *GrammarParser) parseAndExpression() *grammar.AndExpression {
     expr := grammar.NewAndExpression()
     shiftExpr := parser.parseShiftExpression()
     if shiftExpr == nil {
@@ -288,7 +288,7 @@
 }
 
 // xor_expr: and_expr ('^' and_expr)*
-func (parser *Parser) parseXorExpression() *grammar.XorExpression {
+func (parser *GrammarParser) parseXorExpression() *grammar.XorExpression {
     expr := grammar.NewXorExpression()
     andExpr := parser.parseAndExpression()
     if andExpr == nil {
@@ -311,7 +311,7 @@
 }
 
 // expr: xor_expr ('|' xor_expr)*
-func (parser *Parser) parseExpression() *grammar.Expression {
+func (parser *GrammarParser) parseExpression() *grammar.Expression {
     expr := grammar.NewExpression()
     xorExpr := parser.parseXorExpression()
     if xorExpr == nil {
@@ -334,7 +334,7 @@
 }
 
 // comparison: expr (comp_op expr)*
-func (parser *Parser) parseComparison() *grammar.Comparison {
+func (parser *GrammarParser) parseComparison() *grammar.Comparison {
     comparison := grammar.NewComparison()
     expr := parser.parseExpression()
     if expr == nil {
@@ -348,20 +348,20 @@
         next := parser.nextToken()
         switch next.Literal {
         case "<", ">", "==", ">=", "<=", "<>", "!=", "in":
-            comparison.Append(grammar.NewTokenNode(next))
+            comparison.Append(grammar.NewTokenRule(next))
         case "is":
-            comparison.Append(grammar.NewTokenNode(next))
+            comparison.Append(grammar.NewTokenRule(next))
             next2 := parser.nextToken()
             if next2.Literal == "not" {
-                comparison.Append(grammar.NewTokenNode(next2))
+                comparison.Append(grammar.NewTokenRule(next2))
             } else {
                 parser.unreadToken(next2)
             }
         case "not":
             next2 := parser.nextToken()
             if next2.Literal == "in" {
-                comparison.Append(grammar.NewTokenNode(next))
-                comparison.Append(grammar.NewTokenNode(next2))
+                comparison.Append(grammar.NewTokenRule(next))
+                comparison.Append(grammar.NewTokenRule(next2))
             } else {
                 parser.unreadToken(next2)
                 parser.unreadToken(next)
@@ -385,7 +385,7 @@
 }
 
 // not_test: 'not' not_test | comparison
-func (parser *Parser) parseNotTest() *grammar.NotTest {
+func (parser *GrammarParser) parseNotTest() *grammar.NotTest {
     notTest := grammar.NewNotTest()
     next := parser.nextToken()
     if next.IsLiteral("not") {
@@ -406,7 +406,7 @@
 }
 
 // and_test: not_test ('and' not_test)*
-func (parser *Parser) parseAndTest() *grammar.AndTest {
+func (parser *GrammarParser) parseAndTest() *grammar.AndTest {
     andTest := grammar.NewAndTest()
     notTest := parser.parseNotTest()
     if notTest == nil {
@@ -430,7 +430,7 @@
 }
 
 // or_test: and_test ('or' and_test)*
-func (parser *Parser) parseOrTest() *grammar.OrTest {
+func (parser *GrammarParser) parseOrTest() *grammar.OrTest {
     orTest := grammar.NewOrTest()
     andTest := parser.parseAndTest()
     if andTest == nil {
@@ -453,7 +453,7 @@
 }
 
 // test: or_test ['if' or_test 'else' test] | lambdef
-func (parser *Parser) parseTest() *grammar.Test {
+func (parser *GrammarParser) parseTest() *grammar.Test {
     test := grammar.NewTest()
 
     orTest := parser.parseOrTest()
@@ -483,10 +483,10 @@
 }
 
 // testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [',']
-func (parser *Parser) parseTestlistStarExpression() *grammar.TestlistStarExpression {
+func (parser *GrammarParser) parseTestlistStarExpression() *grammar.TestlistStarExpression {
     testlistStarExpression := grammar.NewTestListStarExpression()
 
-    var expr grammar.TestlistStarExpressionChildNode
+    var expr grammar.TestlistStarExpressionChild
     expr = parser.parseTest()
     if expr == nil {
         return nil
@@ -497,7 +497,7 @@
 
 // expr_stmt: testlist_star_expr (augassign (yield_expr|testlist) |
 //                     ('=' (yield_expr|testlist_star_expr))*)
-func (parser *Parser) parseExpressionStatement() *grammar.ExpressionStatement {
+func (parser *GrammarParser) parseExpressionStatement() *grammar.ExpressionStatement {
     exprStmt := grammar.NewExpressionStatement()
     expr := parser.parseTestlistStarExpression()
     if expr == nil {
@@ -513,7 +513,7 @@
             parser.unreadToken(next)
             break
         }
-        exprStmt.Append(grammar.NewTokenNode(next))
+        exprStmt.Append(grammar.NewTokenRule(next))
         expr := parser.parseTestlistStarExpression()
         if expr == nil {
             return nil
@@ -527,10 +527,10 @@
 
 // small_stmt: (expr_stmt | del_stmt | pass_stmt | flow_stmt |
 //             import_stmt | global_stmt | nonlocal_stmt | assert_stmt)
-func (parser *Parser) parseSmallStatment() *grammar.SmallStatement {
+func (parser *GrammarParser) parseSmallStatment() *grammar.SmallStatement {
     smallStmt := grammar.NewSmallStatement()
 
-    var stmt grammar.SmallStatementChildNode
+    var stmt grammar.SmallStatementChild
     stmt = parser.parseExpressionStatement()
     if stmt != nil {
         smallStmt.SetChild(stmt)
@@ -543,7 +543,7 @@
 }
 
 // simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
-func (parser *Parser) parseSimpleStatement() *grammar.SimpleStatement {
+func (parser *GrammarParser) parseSimpleStatement() *grammar.SimpleStatement {
     simpleStmt := grammar.NewSimpleStatement()
     for {
         smallStmt := parser.parseSmallStatment()
@@ -562,7 +562,7 @@
         parser.addError("Expected \"NEWLINE\" instead found \"" + next.ID.String() + "\"")
         return nil
     }
-    simpleStmt.Append(grammar.NewTokenNode(next))
+    simpleStmt.Append(grammar.NewTokenRule(next))
 
     // no small statements found
     if simpleStmt.Length() == 0 {
@@ -572,8 +572,8 @@
 }
 
 // stmt: simple_stmt | compound_stmt
-func (parser *Parser) parseStatement() *grammar.Statement {
-    var next grammar.StatementChildNode
+func (parser *GrammarParser) parseStatement() *grammar.Statement {
+    var next grammar.StatementChild
     next = parser.parseSimpleStatement()
     if next == nil {
         next = parser.parseCompoundStatement()
@@ -589,12 +589,12 @@ func (parser *Parser) parseStatement() *grammar.Statement {
 }
 
 // file_input: (NEWLINE | stmt)* ENDMARKER
-func (parser *Parser) parseFileInput() *grammar.FileInput {
+func (parser *GrammarParser) parseFileInput() *grammar.FileInput {
     root := grammar.NewFileInput()
     for parser.tokenizer.State() == errorcode.E_OK {
         next := parser.nextToken()
         if next.ID == token.NEWLINE {
-            root.Append(grammar.NewTokenNode(next))
+            root.Append(grammar.NewTokenRule(next))
         } else if next.ID == token.ENDMARKER {
             // Unread, so we can read in the expected value later
             parser.unreadToken(next)
@@ -614,16 +614,18 @@
         parser.addError("Expected \"ENDMARKER\" instead received \"" + next.ID.String() + "\"")
         return nil
     }
-    root.Append(grammar.NewTokenNode(next))
+    root.Append(grammar.NewTokenRule(next))
 
     return root
 }
 
-func ParseGrammar(r io.Reader) (*grammar.FileInput, *Parser) {
-    parser := &Parser{
+func NewGrammarParser(r io.Reader) *GrammarParser {
+    return &GrammarParser{
         tokenizer:   scanner.NewScanner(r),
         tokenBuffer: make([]*token.Token, 0),
     }
+}
 
-    return parser.parseFileInput(), parser
+func (parser *GrammarParser) Parse() *grammar.FileInput {
+    return parser.parseFileInput()
 }
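
Usage note (not part of the patch): a minimal sketch of how a caller would drive the renamed parser API, assuming only the import path and the exported GrammarParser surface shown in the hunks above; the input string is purely illustrative.

package main

import (
    "fmt"
    "strings"

    "github.com/brettlangdon/gython/parser"
)

func main() {
    // NewGrammarParser wraps any io.Reader; Parse walks file_input and
    // returns the root rule, or nil if the parser recorded errors.
    gp := parser.NewGrammarParser(strings.NewReader("1 + 2\n"))
    root := gp.Parse()
    if root == nil {
        // Parse errors accumulate on the exported Errors slice.
        for _, err := range gp.Errors {
            fmt.Println(err.Message)
        }
        return
    }
    fmt.Println(root.Repr())
}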