support token comments
This commit is contained in:
parent 89cf447c8d
commit a3ce373a43
6 changed files with 94 additions and 46 deletions
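Trailing "# ..." comments are now stripped from the matched value and exposed through a new Comment field on lexer tokens, and the parser carries them onto Feature, Background, Scenario and Step. A minimal sketch of the resulting lexer behaviour, written as if it lived in a test file of the lexer package (the Example wrapper is an assumption; the expected values mirror the new parser test added below):

// Hypothetical example file in the lexer package, not part of this commit.
package lexer

import (
	"fmt"
	"strings"
)

func ExampleLexer_Next_comment() {
	l := New(strings.NewReader("Given I'm an admin # sets admin permissions"))
	tok := l.Next()
	fmt.Println(tok.Value)   // expected to print: I'm an admin
	fmt.Println(tok.Comment) // expected to print: sets admin permissions
}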
@@ -37,12 +37,14 @@ func (l *Lexer) Next() *Token {
 	}
 	// comment
 	if m := matchers["comment"].FindStringSubmatch(line); len(m) > 0 {
+		comment := strings.TrimSpace(m[2])
 		return &Token{
 			Type: COMMENT,
 			Indent: len(m[1]),
 			Line: l.lines - 1,
-			Value: m[2],
+			Value: comment,
 			Text: line,
+			Comment: comment,
 		}
 	}
 	// pystring
@@ -57,10 +59,11 @@ func (l *Lexer) Next() *Token {
 	// step
 	if m := matchers["step"].FindStringSubmatch(line); len(m) > 0 {
 		tok := &Token{
 			Indent: len(m[1]),
 			Line: l.lines - 1,
-			Value: m[3],
+			Value: strings.TrimSpace(m[3]),
 			Text: line,
+			Comment: strings.Trim(m[4], " #"),
 		}
 		switch m[2] {
 		case "Given":
@@ -79,50 +82,55 @@ func (l *Lexer) Next() *Token {
 	// scenario
 	if m := matchers["scenario"].FindStringSubmatch(line); len(m) > 0 {
 		return &Token{
 			Type: SCENARIO,
 			Indent: len(m[1]),
 			Line: l.lines - 1,
-			Value: m[2],
+			Value: strings.TrimSpace(m[2]),
 			Text: line,
+			Comment: strings.Trim(m[3], " #"),
 		}
 	}
 	// background
 	if m := matchers["background"].FindStringSubmatch(line); len(m) > 0 {
 		return &Token{
 			Type: BACKGROUND,
 			Indent: len(m[1]),
 			Line: l.lines - 1,
 			Text: line,
+			Comment: strings.Trim(m[2], " #"),
 		}
 	}
 	// feature
 	if m := matchers["feature"].FindStringSubmatch(line); len(m) > 0 {
 		return &Token{
 			Type: FEATURE,
 			Indent: len(m[1]),
 			Line: l.lines - 1,
-			Value: m[2],
+			Value: strings.TrimSpace(m[2]),
 			Text: line,
+			Comment: strings.Trim(m[3], " #"),
 		}
 	}
 	// tags
 	if m := matchers["tags"].FindStringSubmatch(line); len(m) > 0 {
 		return &Token{
 			Type: TAGS,
 			Indent: len(m[1]),
 			Line: l.lines - 1,
-			Value: m[2],
+			Value: strings.TrimSpace(m[2]),
 			Text: line,
+			Comment: strings.Trim(m[3], " #"),
 		}
 	}
 	// table row
 	if m := matchers["table_row"].FindStringSubmatch(line); len(m) > 0 {
 		return &Token{
 			Type: TABLE_ROW,
 			Indent: len(m[1]),
 			Line: l.lines - 1,
-			Value: m[2],
+			Value: strings.TrimSpace(m[2]),
 			Text: line,
+			Comment: strings.Trim(m[3], " #"),
 		}
 	}
 	// text

@@ -179,7 +179,7 @@ func Test_table_row_reading(t *testing.T) {
 			t.Fatalf("expected token indentation '%d' at position: %d, is not the same as actual: '%d'", expectedIndents[i], i, indents[i])
 		}
 	}
-	if values[2] != "| name | lastname | num |" {
+	if values[2] != "name | lastname | num |" {
 		t.Fatalf("table row value '%s' was not expected", values[2])
 	}
 }

@@ -3,12 +3,12 @@ package lexer
 import "regexp"
 
 var matchers = map[string]*regexp.Regexp{
-	"feature": regexp.MustCompile("^(\\s*)Feature:\\s*(.*)"),
-	"scenario": regexp.MustCompile("^(\\s*)Scenario:\\s*(.*)"),
-	"background": regexp.MustCompile("^(\\s*)Background:"),
-	"step": regexp.MustCompile("^(\\s*)(Given|When|Then|And|But)\\s+(.+)"),
+	"feature": regexp.MustCompile("^(\\s*)Feature:\\s*([^#]*)(#.*)?"),
+	"scenario": regexp.MustCompile("^(\\s*)Scenario:\\s*([^#]*)(#.*)?"),
+	"background": regexp.MustCompile("^(\\s*)Background:(\\s*#.*)?"),
+	"step": regexp.MustCompile("^(\\s*)(Given|When|Then|And|But)\\s+([^#]*)(#.*)?"),
 	"comment": regexp.MustCompile("^(\\s*)#(.+)"),
 	"pystring": regexp.MustCompile("^(\\s*)\\\"\\\"\\\""),
-	"tags": regexp.MustCompile("^(\\s*)(@.+)"),
-	"table_row": regexp.MustCompile("^(\\s*)(\\|.+)"),
+	"tags": regexp.MustCompile("^(\\s*)@([^#]*)(#.*)?"),
+	"table_row": regexp.MustCompile("^(\\s*)\\|([^#]*)(#.*)?"),
 }

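The updated matchers all follow the same pattern: capture everything up to the first '#' as the value and the rest as an optional comment group, which the lexer then trims with strings.TrimSpace and strings.Trim(..., " #"). A standalone sketch of how the new "feature" expression splits a line (the sample line is made up for illustration):

package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	// Same expression as the new "feature" matcher above.
	feature := regexp.MustCompile("^(\\s*)Feature:\\s*([^#]*)(#.*)?")

	// Hypothetical feature line with a trailing comment.
	m := feature.FindStringSubmatch("Feature: user login # MVP scope")

	fmt.Printf("title:   %q\n", strings.TrimSpace(m[2]))  // "user login"
	fmt.Printf("comment: %q\n", strings.Trim(m[3], " #")) // "MVP scope"
}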
@@ -5,6 +5,7 @@ type Token struct {
 	Line, Indent int // line and indentation number
 	Value string // interpreted value
 	Text string // same text as read
+	Comment string // a comment
 }
 
 func (t *Token) OfType(all ...TokenType) bool {

@@ -24,13 +24,15 @@ func (t Tags) Has(tag Tag) bool {
 }
 
 type Scenario struct {
 	Title string
 	Steps []*Step
 	Tags Tags
+	Comment string
 }
 
 type Background struct {
 	Steps []*Step
+	Comment string
 }
 
 type StepType string
@@ -43,6 +45,7 @@ const (
 
 type Step struct {
 	Text string
+	Comment string
 	Type StepType
 	PyString *PyString
 	Table *Table
@@ -56,6 +59,7 @@ type Feature struct {
 	Background *Background
 	Scenarios []*Scenario
 	AST *AST
+	Comment string
 }
 
 type PyString struct {
@@ -140,6 +144,7 @@ func (p *parser) parseFeature() (ft *Feature, err error) {
 		return ft, p.err("expected a file to begin with a feature definition, but got '"+tok.Type.String()+"' instead", tok.Line)
 	}
 	ft.Title = tok.Value
+	ft.Comment = tok.Comment
 
 	var desc []string
 	for ; p.peek().Type == lexer.TEXT; tok = p.next() {
@@ -154,7 +159,7 @@ func (p *parser) parseFeature() (ft *Feature, err error) {
 			return ft, p.err("there can only be a single background section, but found another", tok.Line)
 		}
 
-		ft.Background = &Background{}
+		ft.Background = &Background{Comment: tok.Comment}
 		p.next() // jump to background steps
 		if ft.Background.Steps, err = p.parseSteps(); err != nil {
 			return ft, err
@@ -179,6 +184,7 @@ func (p *parser) parseFeature() (ft *Feature, err error) {
 		}
 
 		sc.Title = tok.Value
+		sc.Comment = tok.Comment
 		p.next() // jump to scenario steps
 		if sc.Steps, err = p.parseSteps(); err != nil {
 			return ft, err
@@ -191,7 +197,7 @@ func (p *parser) parseFeature() (ft *Feature, err error) {
 
 func (p *parser) parseSteps() (steps []*Step, err error) {
 	for tok := p.peek(); tok.OfType(allSteps...); tok = p.peek() {
-		step := &Step{Text: tok.Value}
+		step := &Step{Text: tok.Value, Comment: tok.Comment}
 		switch tok.Type {
 		case lexer.GIVEN:
 			step.Type = Given

@@ -13,6 +13,8 @@ var testStepSamples = map[string]string{
 	"given_table_hash": `Given there are users:
 		| name | John Doe |`,
 
+	"step_comment": `Given I'm an admin # sets admin permissions`,
+
 	"given_table": `Given there are users:
 		| name | lastname |
 		| John | Doe |
|
@ -65,6 +67,12 @@ func (s *Step) assertPyString(text string, t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (s *Step) assertComment(comment string, t *testing.T) {
|
||||||
|
if s.Comment != comment {
|
||||||
|
t.Fatalf("expected step '%s' comment to be '%s', but got '%s'", s.Text, comment, s.Comment)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func (s *Step) assertTableRow(t *testing.T, num int, cols ...string) {
|
func (s *Step) assertTableRow(t *testing.T, num int, cols ...string) {
|
||||||
if s.Table == nil {
|
if s.Table == nil {
|
||||||
t.Fatalf("step '%s %s' has no table", s.Type, s.Text)
|
t.Fatalf("step '%s %s' has no table", s.Type, s.Text)
|
||||||
|
@ -106,6 +114,31 @@ func Test_parse_basic_given_step(t *testing.T) {
|
||||||
}, t)
|
}, t)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func Test_parse_step_with_comment(t *testing.T) {
|
||||||
|
p := &parser{
|
||||||
|
lx: lexer.New(strings.NewReader(testStepSamples["step_comment"])),
|
||||||
|
path: "some.feature",
|
||||||
|
ast: newAST(),
|
||||||
|
}
|
||||||
|
steps, err := p.parseSteps()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("unexpected error: %s", err)
|
||||||
|
}
|
||||||
|
if len(steps) != 1 {
|
||||||
|
t.Fatalf("expected one step to be parsed")
|
||||||
|
}
|
||||||
|
|
||||||
|
steps[0].assertType(Given, t)
|
||||||
|
steps[0].assertText("I'm an admin", t)
|
||||||
|
steps[0].assertComment("sets admin permissions", t)
|
||||||
|
|
||||||
|
p.next() // step over to eof
|
||||||
|
p.ast.assertMatchesTypes([]lexer.TokenType{
|
||||||
|
lexer.GIVEN,
|
||||||
|
lexer.EOF,
|
||||||
|
}, t)
|
||||||
|
}
|
||||||
|
|
||||||
func Test_parse_hash_table_given_step(t *testing.T) {
|
func Test_parse_hash_table_given_step(t *testing.T) {
|
||||||
p := &parser{
|
p := &parser{
|
||||||
lx: lexer.New(strings.NewReader(testStepSamples["given_table_hash"])),
|
lx: lexer.New(strings.NewReader(testStepSamples["given_table_hash"])),
|
||||||
|
|