simplify gherkin ast, since it is only one level deep token list
This commit is contained in:
parent fc1f94c999
commit 1f4ac0e8ec
10 changed files: 51 additions and 112 deletions
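In short: the parser only ever appends tokens and scans them front to back, so the doubly linked token list is replaced with a flat []*Token slice. A minimal, self-contained sketch of the shape change (Token here is a stand-in for the package's token type, not the real definition):

package main

import "fmt"

// Token stands in for the package's *Token; hypothetical for this sketch.
type Token struct{ Type string }

// Before this commit: a doubly linked list, built so errors and other
// details could be inserted between tokens (see the deleted file below).
type item struct {
	next, prev *item
	value      *Token
}
type listAST struct{ head, tail *item }

// After this commit: the AST is a one-level-deep token list, so a
// plain slice does the same job with less code.
type sliceAST []*Token

func main() {
	var ast sliceAST
	ast = append(ast, &Token{Type: "FEATURE"}, &Token{Type: "TEXT"})
	for i, tok := range ast {
		fmt.Println(i, tok.Type) // scanned front to back, as the parser does
	}
}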
@@ -132,7 +132,7 @@ func (c *config) features() (lst []*gherkin.Feature, err error) {
 	// parse features
 	err = filepath.Walk(path, func(p string, f os.FileInfo, err error) error {
 		if err == nil && !f.IsDir() && strings.HasSuffix(p, ".feature") {
-			ft, err := gherkin.Parse(p)
+			ft, err := gherkin.ParseFile(p)
 			switch {
 			case err == gherkin.ErrEmpty:
 				// its ok, just skip it

@@ -1,46 +0,0 @@
-package gherkin
-
-type item struct {
-	next, prev *item
-	value      *Token
-}
-
-// AST is a linked list to store gherkin Tokens
-// used to insert errors and other details into
-// the token tree
-type AST struct {
-	head, tail *item
-}
-
-func newAST() *AST {
-	return &AST{}
-}
-
-func (l *AST) addTail(t *Token) *item {
-	it := &item{next: nil, prev: l.tail, value: t}
-	if l.head == nil {
-		l.head = it
-	} else {
-		l.tail.next = it
-	}
-	l.tail = it
-	return l.tail
-}
-
-func (l *AST) addBefore(t *Token, i *item) *item {
-	it := &item{next: i, prev: i.prev, value: t}
-	i.prev = it
-	if it.prev == nil {
-		l.head = it
-	}
-	return it
-}
-
-func (l *AST) addAfter(t *Token, i *item) *item {
-	it := &item{next: i.next, prev: i, value: t}
-	i.next = it
-	if it.next == nil {
-		l.tail = it
-	}
-	return it
-}

@@ -1,21 +0,0 @@
-package gherkin
-
-import (
-	"testing"
-)
-
-func (a *AST) assertMatchesTypes(expected []TokenType, t *testing.T) {
-	key := -1
-	for item := a.head; item != nil; item = item.next {
-		key += 1
-		if len(expected) <= key {
-			t.Fatalf("there are more tokens in AST then expected, next is '%s'", item.value.Type)
-		}
-		if expected[key] != item.value.Type {
-			t.Fatalf("expected ast token '%s', but got '%s' at position: %d", expected[key], item.value.Type, key)
-		}
-	}
-	if len(expected)-1 != key {
-		t.Fatalf("expected ast length %d, does not match actual: %d", len(expected), key+1)
-	}
-}

Binary file gherkin/example/example (executable): binary data not shown.

@@ -8,7 +8,7 @@ import (
 )
 
 func main() {
-	feature, err := gherkin.Parse("ls.feature")
+	feature, err := gherkin.ParseFile("ls.feature")
 	switch {
 	case err == gherkin.ErrEmpty:
 		log.Println("the feature file is empty and does not describe any feature")

@@ -34,7 +34,6 @@ func Test_parse_normal_feature(t *testing.T) {
 	p := &parser{
 		lx:   newLexer(strings.NewReader(testFeatureSamples["feature"])),
 		path: "some.feature",
-		ast:  newAST(),
 	}
 	ft, err := p.parseFeature()
 	if err != nil {

@@ -47,7 +46,7 @@ func Test_parse_normal_feature(t *testing.T) {
 		t.Fatalf("expected a feature description to be available")
 	}
 
-	ft.AST.assertMatchesTypes([]TokenType{
+	p.assertMatchesTypes([]TokenType{
 		FEATURE,
 		TEXT,
 		TEXT,

@@ -59,7 +58,6 @@ func Test_parse_feature_without_description(t *testing.T) {
 	p := &parser{
 		lx:   newLexer(strings.NewReader(testFeatureSamples["only_title"])),
 		path: "some.feature",
-		ast:  newAST(),
 	}
 	ft, err := p.parseFeature()
 	if err != nil {

@@ -72,7 +70,7 @@ func Test_parse_feature_without_description(t *testing.T) {
 		t.Fatalf("feature description was not expected")
 	}
 
-	ft.AST.assertMatchesTypes([]TokenType{
+	p.assertMatchesTypes([]TokenType{
 		FEATURE,
 	}, t)
 }

@@ -81,7 +79,6 @@ func Test_parse_empty_feature_file(t *testing.T) {
 	p := &parser{
 		lx:   newLexer(strings.NewReader(testFeatureSamples["empty"])),
 		path: "some.feature",
-		ast:  newAST(),
 	}
 	_, err := p.parseFeature()
 	if err != ErrEmpty {

@@ -93,13 +90,12 @@ func Test_parse_invalid_feature_with_random_text(t *testing.T) {
 	p := &parser{
 		lx:   newLexer(strings.NewReader(testFeatureSamples["invalid"])),
 		path: "some.feature",
-		ast:  newAST(),
 	}
 	_, err := p.parseFeature()
 	if err == nil {
 		t.Fatalf("expected an error but got none")
 	}
-	p.ast.assertMatchesTypes([]TokenType{
+	p.assertMatchesTypes([]TokenType{
 		TEXT,
 	}, t)
 }

@@ -108,7 +104,6 @@ func Test_parse_feature_with_newlines(t *testing.T) {
 	p := &parser{
 		lx:   newLexer(strings.NewReader(testFeatureSamples["starts_with_newlines"])),
 		path: "some.feature",
-		ast:  newAST(),
 	}
 	ft, err := p.parseFeature()
 	if err != nil {

@@ -121,7 +116,7 @@ func Test_parse_feature_with_newlines(t *testing.T) {
 		t.Fatalf("feature description was not expected")
 	}
 
-	ft.AST.assertMatchesTypes([]TokenType{
+	p.assertMatchesTypes([]TokenType{
 		NEW_LINE,
 		NEW_LINE,
 		FEATURE,

@@ -61,6 +61,7 @@ package gherkin
 import (
 	"errors"
 	"fmt"
+	"io"
 	"os"
 	"strings"
 	"unicode"

@@ -133,7 +134,7 @@ type Feature struct {
 	Title      string
 	Background *Background
 	Scenarios  []*Scenario
-	AST        *AST
+	AST        []*Token
 }
 
 // PyString is a multiline text object used with step definition

@@ -173,34 +174,39 @@ var ErrEmpty = errors.New("the feature file is empty")
 type parser struct {
 	lx     *lexer
 	path   string
-	ast    *AST
+	ast    []*Token
 	peeked *Token
 }
 
-// Parse the feature file on the given path into
-// the Feature struct
+// ParseFile parses a feature file on the given
+// path into the Feature struct
 // Returns a Feature struct and error if there is any
-func Parse(path string) (*Feature, error) {
+func ParseFile(path string) (*Feature, error) {
 	file, err := os.Open(path)
 	if err != nil {
 		return nil, err
 	}
 	defer file.Close()
 
+	return Parse(file, path)
+}
+
+// Parse the feature as a given name to the Feature struct
+// Returns a Feature struct and error if there is any
+func Parse(in io.Reader, name string) (*Feature, error) {
 	return (&parser{
-		lx:   newLexer(file),
-		path: path,
-		ast:  newAST(),
+		lx:   newLexer(in),
+		path: name,
 	}).parseFeature()
 }
 
 // reads tokens into AST and skips comments or new lines
 func (p *parser) next() *Token {
-	if p.ast.tail != nil && p.ast.tail.value.Type == EOF {
-		return p.ast.tail.value // has reached EOF, do not record it more than once
+	if len(p.ast) > 0 && p.ast[len(p.ast)-1].Type == EOF {
+		return p.ast[len(p.ast)-1] // has reached EOF, do not record it more than once
 	}
 	tok := p.peek()
-	p.ast.addTail(tok)
+	p.ast = append(p.ast, tok)
 	p.peeked = nil
 	return tok
 }

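The split above yields two entry points: ParseFile opens a path, while Parse consumes any io.Reader under a display name (stored as the parser's path). A usage sketch; the import path is an assumption, not stated in this diff:

package main

import (
	"log"
	"strings"

	"github.com/DATA-DOG/godog/gherkin" // import path assumed for this sketch
)

func main() {
	// Parse a feature file from disk.
	if _, err := gherkin.ParseFile("ls.feature"); err != nil {
		log.Println("parse file:", err)
	}

	// Parse from any io.Reader, e.g. an in-memory string; the name
	// argument stands in for the file path when reporting errors.
	src := "Feature: inline example\n"
	if _, err := gherkin.Parse(strings.NewReader(src), "inline.feature"); err != nil {
		log.Println("parse reader:", err)
	}
}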
@@ -212,7 +218,7 @@ func (p *parser) peek() *Token {
 	}
 
 	for p.peeked = p.lx.read(); p.peeked.OfType(COMMENT, NEW_LINE); p.peeked = p.lx.read() {
-		p.ast.addTail(p.peeked) // record comments and newlines
+		p.ast = append(p.ast, p.peeked) // record comments and newlines
 	}
 
 	return p.peeked

@@ -5,6 +5,22 @@ import (
 	"testing"
 )
 
+func (a *parser) assertMatchesTypes(expected []TokenType, t *testing.T) {
+	key := -1
+	for _, tok := range a.ast {
+		key += 1
+		if len(expected) <= key {
+			t.Fatalf("there are more tokens in AST then expected, next is '%s'", tok.Type)
+		}
+		if expected[key] != tok.Type {
+			t.Fatalf("expected ast token '%s', but got '%s' at position: %d", expected[key], tok.Type, key)
+		}
+	}
+	if len(expected)-1 != key {
+		t.Fatalf("expected ast length %d, does not match actual: %d", len(expected), key+1)
+	}
+}
+
 func (s *Scenario) assertHasTag(tag string, t *testing.T) {
 	if !s.Tags.Has(Tag(tag)) {
 		t.Fatalf("expected scenario '%s' to have '%s' tag, but it did not", s.Title, tag)

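With the slice-backed AST, the helper above becomes a straight range loop instead of pointer-chasing through head/next. For comparison, the same checks could lean on range's own index and an up-front length check; a sketch only, with a hypothetical name that is not part of this change:

// Hypothetical alternative to the helper above, with equivalent checks,
// using range's index instead of a manual counter.
func (a *parser) assertMatchesTypesAlt(expected []TokenType, t *testing.T) {
	if len(a.ast) != len(expected) {
		t.Fatalf("expected ast length %d, does not match actual: %d", len(expected), len(a.ast))
	}
	for i, tok := range a.ast {
		if expected[i] != tok.Type {
			t.Fatalf("expected ast token '%s', but got '%s' at position: %d", expected[i], tok.Type, i)
		}
	}
}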
@@ -43,7 +59,6 @@ func Test_parse_feature_file(t *testing.T) {
 	p := &parser{
 		lx:   newLexer(strings.NewReader(content)),
 		path: "usual.feature",
-		ast:  newAST(),
 	}
 	ft, err := p.parseFeature()
 	if err != nil {

@@ -51,7 +66,7 @@ func Test_parse_feature_file(t *testing.T) {
 	}
 	ft.assertTitle("gherkin parser", t)
 
-	ft.AST.assertMatchesTypes([]TokenType{
+	p.assertMatchesTypes([]TokenType{
 		TAGS,
 		FEATURE,
 		TEXT,

@@ -43,7 +43,6 @@ func Test_parse_scenario_outline(t *testing.T) {
 	p := &parser{
 		lx:   newLexer(strings.NewReader(testLexerSamples["scenario_outline_with_examples"])),
 		path: "usual.feature",
-		ast:  newAST(),
 	}
 	s, err := p.parseScenario()
 	if err != nil {

@@ -51,7 +50,7 @@ func Test_parse_scenario_outline(t *testing.T) {
 	}
 	s.assertTitle("ls supports kinds of options", t)
 
-	p.ast.assertMatchesTypes([]TokenType{
+	p.assertMatchesTypes([]TokenType{
 		SCENARIO_OUTLINE,
 		GIVEN,
 		AND,

@@ -86,7 +86,6 @@ func Test_parse_basic_given_step(t *testing.T) {
 	p := &parser{
 		lx:   newLexer(strings.NewReader(testStepSamples["given"])),
 		path: "some.feature",
-		ast:  newAST(),
 	}
 	steps, err := p.parseSteps()
 	if err != nil {

@@ -99,7 +98,7 @@ func Test_parse_basic_given_step(t *testing.T) {
 	steps[0].assertText("I'm a step", t)
 
 	p.next() // step over to eof
-	p.ast.assertMatchesTypes([]TokenType{
+	p.assertMatchesTypes([]TokenType{
 		GIVEN,
 		EOF,
 	}, t)

@@ -109,7 +108,6 @@ func Test_parse_step_with_comment(t *testing.T) {
 	p := &parser{
 		lx:   newLexer(strings.NewReader(testStepSamples["step_comment"])),
 		path: "some.feature",
-		ast:  newAST(),
 	}
 	steps, err := p.parseSteps()
 	if err != nil {

@@ -123,7 +121,7 @@ func Test_parse_step_with_comment(t *testing.T) {
 	steps[0].assertComment("sets admin permissions", t)
 
 	p.next() // step over to eof
-	p.ast.assertMatchesTypes([]TokenType{
+	p.assertMatchesTypes([]TokenType{
 		GIVEN,
 		EOF,
 	}, t)

@@ -133,7 +131,6 @@ func Test_parse_hash_table_given_step(t *testing.T) {
 	p := &parser{
 		lx:   newLexer(strings.NewReader(testStepSamples["given_table_hash"])),
 		path: "some.feature",
-		ast:  newAST(),
 	}
 	steps, err := p.parseSteps()
 	if err != nil {

@@ -147,7 +144,7 @@ func Test_parse_hash_table_given_step(t *testing.T) {
 	steps[0].assertTableRow(t, 0, "name", "John Doe")
 
 	p.next() // step over to eof
-	p.ast.assertMatchesTypes([]TokenType{
+	p.assertMatchesTypes([]TokenType{
 		GIVEN,
 		TABLE_ROW,
 		EOF,

@@ -158,7 +155,6 @@ func Test_parse_table_given_step(t *testing.T) {
 	p := &parser{
 		lx:   newLexer(strings.NewReader(testStepSamples["given_table"])),
 		path: "some.feature",
-		ast:  newAST(),
 	}
 	steps, err := p.parseSteps()
 	if err != nil {

@@ -174,7 +170,7 @@ func Test_parse_table_given_step(t *testing.T) {
 	steps[0].assertTableRow(t, 2, "Jane", "Doe")
 
 	p.next() // step over to eof
-	p.ast.assertMatchesTypes([]TokenType{
+	p.assertMatchesTypes([]TokenType{
 		GIVEN,
 		TABLE_ROW,
 		TABLE_ROW,

@@ -187,7 +183,6 @@ func Test_parse_pystring_step(t *testing.T) {
 	p := &parser{
 		lx:   newLexer(strings.NewReader(testStepSamples["then_pystring"])),
 		path: "some.feature",
-		ast:  newAST(),
 	}
 	steps, err := p.parseSteps()
 	if err != nil {

@@ -204,7 +199,7 @@ func Test_parse_pystring_step(t *testing.T) {
 	}, "\n"), t)
 
 	p.next() // step over to eof
-	p.ast.assertMatchesTypes([]TokenType{
+	p.assertMatchesTypes([]TokenType{
 		THEN,
 		PYSTRING,
 		TEXT,

@@ -218,7 +213,6 @@ func Test_parse_empty_pystring_step(t *testing.T) {
 	p := &parser{
 		lx:   newLexer(strings.NewReader(testStepSamples["when_pystring_empty"])),
 		path: "some.feature",
-		ast:  newAST(),
 	}
 	steps, err := p.parseSteps()
 	if err != nil {

@@ -232,7 +226,7 @@ func Test_parse_empty_pystring_step(t *testing.T) {
 	steps[0].assertPyString("", t)
 
 	p.next() // step over to eof
-	p.ast.assertMatchesTypes([]TokenType{
+	p.assertMatchesTypes([]TokenType{
 		WHEN,
 		PYSTRING,
 		PYSTRING,

@@ -244,13 +238,12 @@ func Test_parse_unclosed_pystring_step(t *testing.T) {
 	p := &parser{
 		lx:   newLexer(strings.NewReader(testStepSamples["when_pystring_unclosed"])),
 		path: "some.feature",
-		ast:  newAST(),
 	}
 	_, err := p.parseSteps()
 	if err == nil {
 		t.Fatalf("expected an error, but got none")
 	}
-	p.ast.assertMatchesTypes([]TokenType{
+	p.assertMatchesTypes([]TokenType{
 		WHEN,
 		PYSTRING,
 		TEXT,

@@ -263,7 +256,6 @@ func Test_parse_step_group(t *testing.T) {
 	p := &parser{
 		lx:   newLexer(strings.NewReader(testStepSamples["step_group"])),
 		path: "some.feature",
-		ast:  newAST(),
 	}
 	steps, err := p.parseSteps()
 	if err != nil {

@@ -279,7 +271,7 @@ func Test_parse_step_group(t *testing.T) {
 	steps[3].assertText("something should happen", t)
 
 	p.next() // step over to eof
-	p.ast.assertMatchesTypes([]TokenType{
+	p.assertMatchesTypes([]TokenType{
 		GIVEN,
 		AND,
 		WHEN,

@@ -292,7 +284,6 @@ func Test_parse_another_step_group(t *testing.T) {
 	p := &parser{
 		lx:   newLexer(strings.NewReader(testStepSamples["step_group_another"])),
 		path: "some.feature",
-		ast:  newAST(),
 	}
 	steps, err := p.parseSteps()
 	if err != nil {

@@ -308,7 +299,7 @@ func Test_parse_another_step_group(t *testing.T) {
 	steps[3].assertText("I expect the result", t)
 
 	p.next() // step over to eof
-	p.ast.assertMatchesTypes([]TokenType{
+	p.assertMatchesTypes([]TokenType{
 		GIVEN,
 		AND,
 		WHEN,