This commit is contained in:
gedi 2015-06-10 12:58:26 +03:00
parent 89cf447c8d
commit a3ce373a43
6 changed files with 94 additions and 46 deletions

View file

@@ -37,12 +37,14 @@ func (l *Lexer) Next() *Token {
 	}
 	// comment
 	if m := matchers["comment"].FindStringSubmatch(line); len(m) > 0 {
+		comment := strings.TrimSpace(m[2])
 		return &Token{
-			Type:   COMMENT,
-			Indent: len(m[1]),
-			Line:   l.lines - 1,
-			Value:  m[2],
-			Text:   line,
+			Type:    COMMENT,
+			Indent:  len(m[1]),
+			Line:    l.lines - 1,
+			Value:   comment,
+			Text:    line,
+			Comment: comment,
 		}
 	}
 	// pystring
@@ -57,10 +59,11 @@ func (l *Lexer) Next() *Token {
 	// step
 	if m := matchers["step"].FindStringSubmatch(line); len(m) > 0 {
 		tok := &Token{
-			Indent: len(m[1]),
-			Line:   l.lines - 1,
-			Value:  m[3],
-			Text:   line,
+			Indent:  len(m[1]),
+			Line:    l.lines - 1,
+			Value:   strings.TrimSpace(m[3]),
+			Text:    line,
+			Comment: strings.Trim(m[4], " #"),
 		}
 		switch m[2] {
 		case "Given":
@@ -79,50 +82,55 @@ func (l *Lexer) Next() *Token {
 	// scenario
 	if m := matchers["scenario"].FindStringSubmatch(line); len(m) > 0 {
 		return &Token{
-			Type:   SCENARIO,
-			Indent: len(m[1]),
-			Line:   l.lines - 1,
-			Value:  m[2],
-			Text:   line,
+			Type:    SCENARIO,
+			Indent:  len(m[1]),
+			Line:    l.lines - 1,
+			Value:   strings.TrimSpace(m[2]),
+			Text:    line,
+			Comment: strings.Trim(m[3], " #"),
 		}
 	}
 	// background
 	if m := matchers["background"].FindStringSubmatch(line); len(m) > 0 {
 		return &Token{
-			Type:   BACKGROUND,
-			Indent: len(m[1]),
-			Line:   l.lines - 1,
-			Text:   line,
+			Type:    BACKGROUND,
+			Indent:  len(m[1]),
+			Line:    l.lines - 1,
+			Text:    line,
+			Comment: strings.Trim(m[2], " #"),
 		}
 	}
 	// feature
 	if m := matchers["feature"].FindStringSubmatch(line); len(m) > 0 {
 		return &Token{
-			Type:   FEATURE,
-			Indent: len(m[1]),
-			Line:   l.lines - 1,
-			Value:  m[2],
-			Text:   line,
+			Type:    FEATURE,
+			Indent:  len(m[1]),
+			Line:    l.lines - 1,
+			Value:   strings.TrimSpace(m[2]),
+			Text:    line,
+			Comment: strings.Trim(m[3], " #"),
 		}
 	}
 	// tags
 	if m := matchers["tags"].FindStringSubmatch(line); len(m) > 0 {
 		return &Token{
-			Type:   TAGS,
-			Indent: len(m[1]),
-			Line:   l.lines - 1,
-			Value:  m[2],
-			Text:   line,
+			Type:    TAGS,
+			Indent:  len(m[1]),
+			Line:    l.lines - 1,
+			Value:   strings.TrimSpace(m[2]),
+			Text:    line,
+			Comment: strings.Trim(m[3], " #"),
 		}
 	}
 	// table row
 	if m := matchers["table_row"].FindStringSubmatch(line); len(m) > 0 {
 		return &Token{
-			Type:   TABLE_ROW,
-			Indent: len(m[1]),
-			Line:   l.lines - 1,
-			Value:  m[2],
-			Text:   line,
+			Type:    TABLE_ROW,
+			Indent:  len(m[1]),
+			Line:    l.lines - 1,
+			Value:   strings.TrimSpace(m[2]),
+			Text:    line,
+			Comment: strings.Trim(m[3], " #"),
 		}
 	}
 	// text
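A quick usage sketch of the behaviour introduced above: the lexer now trims the token value and exposes the trailing "# ..." part through the new Comment field. The import path of the lexer package is assumed here; the expected values match the new parser test further down.

package main

import (
	"fmt"
	"strings"

	// Assumed import path for this repository's lexer package.
	"github.com/DATA-DOG/godog/lexer"
)

func main() {
	l := lexer.New(strings.NewReader("Given I'm an admin # sets admin permissions"))
	tok := l.Next()
	fmt.Println(tok.Value)   // "I'm an admin"
	fmt.Println(tok.Comment) // "sets admin permissions"
}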

View file

@@ -179,7 +179,7 @@ func Test_table_row_reading(t *testing.T) {
 			t.Fatalf("expected token indentation '%d' at position: %d, is not the same as actual: '%d'", expectedIndents[i], i, indents[i])
 		}
 	}
-	if values[2] != "| name | lastname | num |" {
+	if values[2] != "name | lastname | num |" {
 		t.Fatalf("table row value '%s' was not expected", values[2])
 	}
 }

View file

@@ -3,12 +3,12 @@ package lexer
 import "regexp"
 
 var matchers = map[string]*regexp.Regexp{
-	"feature":    regexp.MustCompile("^(\\s*)Feature:\\s*(.*)"),
-	"scenario":   regexp.MustCompile("^(\\s*)Scenario:\\s*(.*)"),
-	"background": regexp.MustCompile("^(\\s*)Background:"),
-	"step":       regexp.MustCompile("^(\\s*)(Given|When|Then|And|But)\\s+(.+)"),
+	"feature":    regexp.MustCompile("^(\\s*)Feature:\\s*([^#]*)(#.*)?"),
+	"scenario":   regexp.MustCompile("^(\\s*)Scenario:\\s*([^#]*)(#.*)?"),
+	"background": regexp.MustCompile("^(\\s*)Background:(\\s*#.*)?"),
+	"step":       regexp.MustCompile("^(\\s*)(Given|When|Then|And|But)\\s+([^#]*)(#.*)?"),
 	"comment":    regexp.MustCompile("^(\\s*)#(.+)"),
 	"pystring":   regexp.MustCompile("^(\\s*)\\\"\\\"\\\""),
-	"tags":       regexp.MustCompile("^(\\s*)(@.+)"),
-	"table_row":  regexp.MustCompile("^(\\s*)(\\|.+)"),
+	"tags":       regexp.MustCompile("^(\\s*)@([^#]*)(#.*)?"),
+	"table_row":  regexp.MustCompile("^(\\s*)\\|([^#]*)(#.*)?"),
 }
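For reference, a minimal self-contained sketch of what the updated "step" matcher captures on a commented line (the sample input mirrors the new test fixture below; group indices refer to the pattern above):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	step := regexp.MustCompile("^(\\s*)(Given|When|Then|And|But)\\s+([^#]*)(#.*)?")
	m := step.FindStringSubmatch("  Given I'm an admin # sets admin permissions")
	fmt.Printf("%q\n", m[2]) // "Given"
	fmt.Printf("%q\n", m[3]) // "I'm an admin " (still carries a trailing space, hence strings.TrimSpace in the lexer)
	fmt.Printf("%q\n", m[4]) // "# sets admin permissions" (reduced to the comment text via strings.Trim(m[4], " #"))
}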

View file

@@ -5,6 +5,7 @@ type Token struct {
 	Line, Indent int    // line and indentation number
 	Value        string // interpreted value
 	Text         string // same text as read
+	Comment      string // a comment
 }
 
 func (t *Token) OfType(all ...TokenType) bool {

View file

@@ -24,13 +24,15 @@ func (t Tags) Has(tag Tag) bool {
 }
 
 type Scenario struct {
-	Title string
-	Steps []*Step
-	Tags  Tags
+	Title   string
+	Steps   []*Step
+	Tags    Tags
+	Comment string
 }
 
 type Background struct {
-	Steps []*Step
+	Steps   []*Step
+	Comment string
 }
 
 type StepType string
@@ -43,6 +45,7 @@ const (
 
 type Step struct {
 	Text     string
+	Comment  string
 	Type     StepType
 	PyString *PyString
 	Table    *Table
@@ -56,6 +59,7 @@ type Feature struct {
 	Background *Background
 	Scenarios  []*Scenario
 	AST        *AST
+	Comment    string
 }
 
 type PyString struct {
@@ -140,6 +144,7 @@ func (p *parser) parseFeature() (ft *Feature, err error) {
 		return ft, p.err("expected a file to begin with a feature definition, but got '"+tok.Type.String()+"' instead", tok.Line)
 	}
 	ft.Title = tok.Value
+	ft.Comment = tok.Comment
 
 	var desc []string
 	for ; p.peek().Type == lexer.TEXT; tok = p.next() {
@@ -154,7 +159,7 @@ func (p *parser) parseFeature() (ft *Feature, err error) {
 				return ft, p.err("there can only be a single background section, but found another", tok.Line)
 			}
-			ft.Background = &Background{}
+			ft.Background = &Background{Comment: tok.Comment}
 			p.next() // jump to background steps
 			if ft.Background.Steps, err = p.parseSteps(); err != nil {
 				return ft, err
 			}
@@ -179,6 +184,7 @@ func (p *parser) parseFeature() (ft *Feature, err error) {
 			}
 			sc.Title = tok.Value
+			sc.Comment = tok.Comment
 			p.next() // jump to scenario steps
 			if sc.Steps, err = p.parseSteps(); err != nil {
 				return ft, err
 			}
@@ -191,7 +197,7 @@ func (p *parser) parseFeature() (ft *Feature, err error) {
 
 func (p *parser) parseSteps() (steps []*Step, err error) {
 	for tok := p.peek(); tok.OfType(allSteps...); tok = p.peek() {
-		step := &Step{Text: tok.Value}
+		step := &Step{Text: tok.Value, Comment: tok.Comment}
 		switch tok.Type {
 		case lexer.GIVEN:
 			step.Type = Given

View file

@@ -13,6 +13,8 @@ var testStepSamples = map[string]string{
 	"given_table_hash": `Given there are users:
 		| name | John Doe |`,
 
+	"step_comment": `Given I'm an admin # sets admin permissions`,
+
 	"given_table": `Given there are users:
 		| name | lastname |
 		| John | Doe |
@@ -65,6 +67,12 @@ func (s *Step) assertPyString(text string, t *testing.T) {
 	}
 }
 
+func (s *Step) assertComment(comment string, t *testing.T) {
+	if s.Comment != comment {
+		t.Fatalf("expected step '%s' comment to be '%s', but got '%s'", s.Text, comment, s.Comment)
+	}
+}
+
 func (s *Step) assertTableRow(t *testing.T, num int, cols ...string) {
 	if s.Table == nil {
 		t.Fatalf("step '%s %s' has no table", s.Type, s.Text)
@@ -106,6 +114,31 @@ func Test_parse_basic_given_step(t *testing.T) {
 	}, t)
 }
 
+func Test_parse_step_with_comment(t *testing.T) {
+	p := &parser{
+		lx:   lexer.New(strings.NewReader(testStepSamples["step_comment"])),
+		path: "some.feature",
+		ast:  newAST(),
+	}
+	steps, err := p.parseSteps()
+	if err != nil {
+		t.Fatalf("unexpected error: %s", err)
+	}
+	if len(steps) != 1 {
+		t.Fatalf("expected one step to be parsed")
+	}
+
+	steps[0].assertType(Given, t)
+	steps[0].assertText("I'm an admin", t)
+	steps[0].assertComment("sets admin permissions", t)
+
+	p.next() // step over to eof
+	p.ast.assertMatchesTypes([]lexer.TokenType{
+		lexer.GIVEN,
+		lexer.EOF,
+	}, t)
+}
+
 func Test_parse_hash_table_given_step(t *testing.T) {
 	p := &parser{
 		lx:   lexer.New(strings.NewReader(testStepSamples["given_table_hash"])),