add general step parsing tests

parent 2cc517654e
commit 33805bca2b

6 changed files with 322 additions and 26 deletions
@@ -66,6 +66,7 @@ func (l *Lexer) read() *Token {
 			Indent: len(m[1]),
 			Line:   l.lines - 1,
 			Value:  m[2],
+			Text:   line,
 		}
 	}
 	// pystring
@@ -74,6 +75,7 @@ func (l *Lexer) read() *Token {
 			Type:   PYSTRING,
 			Indent: len(m[1]),
 			Line:   l.lines - 1,
+			Text:   line,
 		}
 	}
 	// step
@@ -82,6 +84,7 @@ func (l *Lexer) read() *Token {
 			Indent: len(m[1]),
 			Line:   l.lines - 1,
 			Value:  m[3],
+			Text:   line,
 		}
 		switch m[2] {
 		case "Given":
@@ -104,6 +107,7 @@ func (l *Lexer) read() *Token {
 			Indent: len(m[1]),
 			Line:   l.lines - 1,
 			Value:  m[2],
+			Text:   line,
 		}
 	}
 	// background
@@ -112,6 +116,7 @@ func (l *Lexer) read() *Token {
 			Type:   BACKGROUND,
 			Indent: len(m[1]),
 			Line:   l.lines - 1,
+			Text:   line,
 		}
 	}
 	// feature
@@ -121,6 +126,7 @@ func (l *Lexer) read() *Token {
 			Indent: len(m[1]),
 			Line:   l.lines - 1,
 			Value:  m[2],
+			Text:   line,
 		}
 	}
 	// tags
@@ -130,6 +136,7 @@ func (l *Lexer) read() *Token {
 			Indent: len(m[1]),
 			Line:   l.lines - 1,
 			Value:  m[2],
+			Text:   line,
 		}
 	}
 	// table row
@@ -139,6 +146,7 @@ func (l *Lexer) read() *Token {
 			Indent: len(m[1]),
 			Line:   l.lines - 1,
 			Value:  m[2],
+			Text:   line,
 		}
 	}
 	// text
@@ -148,5 +156,6 @@ func (l *Lexer) read() *Token {
 		Line:   l.lines - 1,
 		Value:  text,
 		Indent: len(line) - len(text),
+		Text:   line,
 	}
 }

@@ -1,9 +1,10 @@
 package lexer
 
 type Token struct {
-	Type         TokenType
-	Line, Indent int
-	Value        string
+	Type         TokenType // type of token
+	Line, Indent int       // line and indentation number
+	Value        string    // interpreted value
+	Text         string    // same text as read
 }
 
 func (t *Token) OfType(all ...TokenType) bool {

@@ -171,10 +171,9 @@ func (p *parser) parseFeature() (ft *Feature, err error) {
 	return ft, nil
 }
 
-func (p *parser) parseSteps() ([]*Step, error) {
-	var steps []*Step
-	var tok *lexer.Token
-	for ; p.peek().OfType(allSteps...); tok = p.next() {
+func (p *parser) parseSteps() (steps []*Step, err error) {
+	for tok := p.peek(); tok.OfType(allSteps...); tok = p.peek() {
+		p.next() // move over the step
 		step := &Step{Text: tok.Value}
 		switch tok.Type {
 		case lexer.GIVEN:
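
A side note on why the loop changed shape: a Go for-loop's post statement runs only after each pass of the body, so in the old form the first iteration read `tok.Value` while `tok` was still nil. A tiny standalone illustration of the pitfall (not code from this repo):

package main

import "fmt"

func main() {
	var last *int
	// The post statement (last = &i) runs only AFTER each body pass,
	// so `last` is still nil the first time the body executes.
	for i := 0; i < 2; last = &i {
		fmt.Println(last == nil) // prints true, then false
		i++
	}
}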

@@ -183,14 +182,14 @@ func (p *parser) parseSteps() ([]*Step, error) {
 			step.Type = When
 		case lexer.THEN:
 			step.Type = Then
-		case lexer.AND:
-		case lexer.BUT:
+		case lexer.AND, lexer.BUT:
 			if len(steps) > 0 {
 				step.Type = steps[len(steps)-1].Type
 			} else {
 				step.Type = Given
 			}
 		}
+		// step text maybe multilined
 		for ; p.peek().OfType(lexer.TEXT); tok = p.next() {
 			step.Text += " " + tok.Value
 		}
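
The merged `case lexer.AND, lexer.BUT` encodes the rule that `And`/`But` steps inherit the type of the step before them, defaulting to `Given` when there is no predecessor. A minimal standalone sketch of that rule (illustrative names, not this package's API):

package main

import "fmt"

// resolve mirrors the inheritance rule above: And/But reuse the
// previous step's type and fall back to Given with no predecessor.
func resolve(keyword string, prev []string) string {
	switch keyword {
	case "Given", "When", "Then":
		return keyword
	case "And", "But":
		if len(prev) > 0 {
			return prev[len(prev)-1]
		}
	}
	return "Given"
}

func main() {
	var types []string
	for _, kw := range []string{"Given", "And", "When", "But"} {
		types = append(types, resolve(kw, types))
	}
	fmt.Println(types) // [Given Given When When]
}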

@@ -219,14 +218,14 @@ func (p *parser) parseSteps() ([]*Step, error) {
 func (p *parser) parsePystring(s *Step) error {
 	var tok *lexer.Token
 	started := p.next() // skip the start of pystring
-	text := ""
+	var lines []string
 	for tok = p.next(); !tok.OfType(lexer.EOF, lexer.PYSTRING); tok = p.next() {
-		text += strings.Repeat(" ", tok.Indent) + tok.Value
+		lines = append(lines, tok.Text)
 	}
 	if tok.Type == lexer.EOF {
 		return fmt.Errorf("pystring which was opened on %s:%d was not closed", p.path, started.Line)
 	}
-	s.PyString = &PyString{Body: text}
+	s.PyString = &PyString{Body: strings.Join(lines, "\n")}
 	return nil
 }
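
Joining the raw `tok.Text` lines with newlines preserves the pystring body exactly as written; the old string accumulation re-created indentation but dropped the line breaks. A rough before/after sketch, assuming `Text` carries the raw line and `Value` the trimmed content:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Hypothetical tokens for a two-line pystring body.
	type tok struct {
		Indent int
		Value  string // trimmed content
		Text   string // raw line as read
	}
	toks := []tok{
		{4, "Some text", "    Some text"},
		{4, "And more", "    And more"},
	}

	old, lines := "", []string{}
	for _, tk := range toks {
		old += strings.Repeat(" ", tk.Indent) + tk.Value // no newlines between lines
		lines = append(lines, tk.Text)
	}
	fmt.Printf("%q\n", old)                       // "    Some text    And more"
	fmt.Printf("%q\n", strings.Join(lines, "\n")) // "    Some text\n    And more"
}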

@@ -234,7 +233,7 @@ func (p *parser) parseTable(s *Step) error {
 	s.Table = &Table{}
 	for row := p.peek(); row.Type == lexer.TABLE_ROW; row = p.peek() {
 		var cols []string
-		for _, r := range strings.Split(row.Value, "|") {
+		for _, r := range strings.Split(strings.Trim(row.Value, "|"), "|") {
 			cols = append(cols, strings.TrimFunc(r, unicode.IsSpace))
 		}
 		// ensure the same colum number for each row
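
Why the added `strings.Trim`: splitting a row such as `| name | John Doe |` on `|` yields empty first and last elements from the outer pipes, which would turn into phantom columns. Trimming the outer pipes first leaves only the real cells:

package main

import (
	"fmt"
	"strings"
)

func main() {
	row := "| name | John Doe |"
	fmt.Printf("%q\n", strings.Split(row, "|"))
	// ["" " name " " John Doe " ""] - empty edge cells
	fmt.Printf("%q\n", strings.Split(strings.Trim(row, "|"), "|"))
	// [" name " " John Doe "] - only the real columns remain
}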

@@ -20,19 +20,6 @@ var testFeatureSamples = map[string]string{
 Feature: gherkin`,
 }
 
-func (a *AST) assertMatchesTypes(expected []lexer.TokenType, t *testing.T) {
-	key := -1
-	for item := a.head; item != nil; item = item.next {
-		key += 1
-		if expected[key] != item.value.Type {
-			t.Fatalf("expected ast token '%s', but got '%s' at position: %d", expected[key], item.value.Type, key)
-		}
-	}
-	if len(expected)-1 != key {
-		t.Fatalf("expected ast length %d, does not match actual: %d", len(expected), key+1)
-	}
-}
-
 func Test_parse_normal_feature(t *testing.T) {
 	p := &parser{
 		lx: lexer.New(strings.NewReader(testFeatureSamples["full"])),

gherkin/parse_steps_test.go (new file, 275 additions)
@@ -0,0 +1,275 @@
+package gherkin
+
+import (
+	"strings"
+	"testing"
+
+	"github.com/l3pp4rd/go-behat/gherkin/lexer"
+)
+
+var testStepSamples = map[string]string{
+	"given": indent(4, `Given I'm a step`),
+
+	"given_table_hash": `Given there are users:
+    | name | John Doe |`,
+
+	"given_table": `Given there are users:
+    | name | lastname |
+    | John | Doe |
+    | Jane | Doe |`,
+
+	"then_pystring": `Then there should be text:
+    """
+    Some text
+    And more
+    """`,
+
+	"when_pystring_empty": `When I do request with body:
+    """
+    """`,
+
+	"when_pystring_unclosed": `When I do request with body:
+    """
+    {"json": "data"}
+    ""`,
+
+	"step_group": `Given there are conditions
+    And there are more conditions
+    When I do something
+    Then something should happen`,
+
+	"step_group_multiline": `Given: an admin user "John Doe"
+    And user "John Doe" belongs
+    to user group "editors"
+    When I do something
+    Then I expect the result`,
+}
+
+func (s *Step) assertType(typ StepType, t *testing.T) {
+	if s.Type != typ {
+		t.Fatalf("expected step '%s' type to be '%s', but got '%s'", s.Text, typ, s.Type)
+	}
+}
+
+func (s *Step) assertText(text string, t *testing.T) {
+	if s.Text != text {
+		t.Fatalf("expected step text to be '%s', but got '%s'", text, s.Text)
+	}
+}
+
+func (s *Step) assertPyString(text string, t *testing.T) {
+	if s.PyString == nil {
+		t.Fatalf("step '%s %s' has no pystring", s.Type, s.Text)
+	}
+	if s.PyString.Body != text {
+		t.Fatalf("expected step pystring body to be '%s', but got '%s'", text, s.PyString.Body)
+	}
+}
+
+func (s *Step) assertTableRow(t *testing.T, num int, cols ...string) {
+	if s.Table == nil {
+		t.Fatalf("step '%s %s' has no table", s.Type, s.Text)
+	}
+	if len(s.Table.rows) <= num {
+		t.Fatalf("step '%s %s' table has no row: %d", s.Type, s.Text, num)
+	}
+	if len(s.Table.rows[num]) != len(cols) {
+		t.Fatalf("step '%s %s' table row length, does not match expected: %d", s.Type, s.Text, len(cols))
+	}
+	for i, col := range s.Table.rows[num] {
+		if col != cols[i] {
+			t.Fatalf("step '%s %s' table row %d, column %d - value '%s', does not match expected: %s", s.Type, s.Text, num, i, col, cols[i])
+		}
+	}
+}
+
+func Test_parse_basic_given_step(t *testing.T) {
+	p := &parser{
+		lx:   lexer.New(strings.NewReader(testStepSamples["given"])),
+		path: "some.feature",
+		ast:  newAST(),
+	}
+	steps, err := p.parseSteps()
+	if err != nil {
+		t.Fatalf("unexpected error: %s", err)
+	}
+	if len(steps) != 1 {
+		t.Fatalf("expected one step to be parsed")
+	}
+
+	steps[0].assertType(Given, t)
+	steps[0].assertText("I'm a step", t)
+
+	p.next() // step over to eof
+	p.ast.assertMatchesTypes([]lexer.TokenType{
+		lexer.GIVEN,
+		lexer.EOF,
+	}, t)
+}
+
+func Test_parse_hash_table_given_step(t *testing.T) {
+	p := &parser{
+		lx:   lexer.New(strings.NewReader(testStepSamples["given_table_hash"])),
+		path: "some.feature",
+		ast:  newAST(),
+	}
+	steps, err := p.parseSteps()
+	if err != nil {
+		t.Fatalf("unexpected error: %s", err)
+	}
+	if len(steps) != 1 {
+		t.Fatalf("expected one step to be parsed")
+	}
+
+	steps[0].assertType(Given, t)
+	steps[0].assertText("there are users:", t)
+	steps[0].assertTableRow(t, 0, "name", "John Doe")
+
+	p.next() // step over to eof
+	p.ast.assertMatchesTypes([]lexer.TokenType{
+		lexer.GIVEN,
+		lexer.TABLE_ROW,
+		lexer.EOF,
+	}, t)
+}
+
+func Test_parse_table_given_step(t *testing.T) {
+	p := &parser{
+		lx:   lexer.New(strings.NewReader(testStepSamples["given_table"])),
+		path: "some.feature",
+		ast:  newAST(),
+	}
+	steps, err := p.parseSteps()
+	if err != nil {
+		t.Fatalf("unexpected error: %s", err)
+	}
+	if len(steps) != 1 {
+		t.Fatalf("expected one step to be parsed")
+	}
+
+	steps[0].assertType(Given, t)
+	steps[0].assertText("there are users:", t)
+	steps[0].assertTableRow(t, 0, "name", "lastname")
+	steps[0].assertTableRow(t, 1, "John", "Doe")
+	steps[0].assertTableRow(t, 2, "Jane", "Doe")
+
+	p.next() // step over to eof
+	p.ast.assertMatchesTypes([]lexer.TokenType{
+		lexer.GIVEN,
+		lexer.TABLE_ROW,
+		lexer.TABLE_ROW,
+		lexer.TABLE_ROW,
+		lexer.EOF,
+	}, t)
+}
+
+func Test_parse_pystring_step(t *testing.T) {
+	p := &parser{
+		lx:   lexer.New(strings.NewReader(testStepSamples["then_pystring"])),
+		path: "some.feature",
+		ast:  newAST(),
+	}
+	steps, err := p.parseSteps()
+	if err != nil {
+		t.Fatalf("unexpected error: %s", err)
+	}
+	if len(steps) != 1 {
+		t.Fatalf("expected one step to be parsed")
+	}
+
+	steps[0].assertType(Then, t)
+	steps[0].assertText("there should be text:", t)
+	steps[0].assertPyString(strings.Join([]string{
+		indent(4, "Some text"),
+		indent(4, "And more"),
+	}, "\n"), t)
+
+	p.next() // step over to eof
+	p.ast.assertMatchesTypes([]lexer.TokenType{
+		lexer.THEN,
+		lexer.PYSTRING,
+		lexer.TEXT,
+		lexer.AND, // we do not care what we parse inside PYSTRING even if its whole behat feature text
+		lexer.PYSTRING,
+		lexer.EOF,
+	}, t)
+}
+
+func Test_parse_empty_pystring_step(t *testing.T) {
+	p := &parser{
+		lx:   lexer.New(strings.NewReader(testStepSamples["when_pystring_empty"])),
+		path: "some.feature",
+		ast:  newAST(),
+	}
+	steps, err := p.parseSteps()
+	if err != nil {
+		t.Fatalf("unexpected error: %s", err)
+	}
+	if len(steps) != 1 {
+		t.Fatalf("expected one step to be parsed")
+	}
+
+	steps[0].assertType(When, t)
+	steps[0].assertText("I do request with body:", t)
+	steps[0].assertPyString("", t)
+
+	p.next() // step over to eof
+	p.ast.assertMatchesTypes([]lexer.TokenType{
+		lexer.WHEN,
+		lexer.PYSTRING,
+		lexer.PYSTRING,
+		lexer.EOF,
+	}, t)
+}
+
+func Test_parse_unclosed_pystring_step(t *testing.T) {
+	p := &parser{
+		lx:   lexer.New(strings.NewReader(testStepSamples["when_pystring_unclosed"])),
+		path: "some.feature",
+		ast:  newAST(),
+	}
+	_, err := p.parseSteps()
+	if err == nil {
+		t.Fatalf("expected an error, but got none")
+	}
+	p.ast.assertMatchesTypes([]lexer.TokenType{
+		lexer.WHEN,
+		lexer.PYSTRING,
+		lexer.TEXT,
+		lexer.TEXT,
+		lexer.EOF,
+	}, t)
+}
+
+func Test_parse_step_group(t *testing.T) {
+	p := &parser{
+		lx:   lexer.New(strings.NewReader(testStepSamples["step_group"])),
+		path: "some.feature",
+		ast:  newAST(),
+	}
+	steps, err := p.parseSteps()
+	if err != nil {
+		t.Fatalf("unexpected error: %s", err)
+	}
+	if len(steps) != 4 {
+		t.Fatalf("expected four steps to be parsed, but got: %d", len(steps))
+	}
+
+	steps[0].assertType(Given, t)
+	steps[0].assertText("there are conditions", t)
+	steps[1].assertType(Given, t)
+	steps[1].assertText("there are more conditions", t)
+	steps[2].assertType(When, t)
+	steps[2].assertText("I do something", t)
+	steps[3].assertType(Then, t)
+	steps[3].assertText("something should happen", t)
+
+	p.next() // step over to eof
+	p.ast.assertMatchesTypes([]lexer.TokenType{
+		lexer.GIVEN,
+		lexer.AND,
+		lexer.WHEN,
+		lexer.THEN,
+		lexer.EOF,
+	}, t)
+}

gherkin/util_test.go (new file, 25 additions)
@@ -0,0 +1,25 @@
+package gherkin
+
+import (
+	"strings"
+	"testing"
+
+	"github.com/l3pp4rd/go-behat/gherkin/lexer"
+)
+
+func (a *AST) assertMatchesTypes(expected []lexer.TokenType, t *testing.T) {
+	key := -1
+	for item := a.head; item != nil; item = item.next {
+		key += 1
+		if expected[key] != item.value.Type {
+			t.Fatalf("expected ast token '%s', but got '%s' at position: %d", expected[key], item.value.Type, key)
+		}
+	}
+	if len(expected)-1 != key {
+		t.Fatalf("expected ast length %d, does not match actual: %d", len(expected), key+1)
+	}
+}
+
+func indent(n int, s string) string {
+	return strings.Repeat(" ", n) + s
+}