ship gherkin parser in a subpackage to prevent compatibility conflicts

parent c25dedf000
commit 518101cbc7
25 changed files with 6720 additions and 25 deletions
@@ -5,11 +5,9 @@ go:
  - 1.6
  - tip

script:
  # pull all external dependencies
  - go get gopkg.in/cucumber/gherkin-go.v3
  - go get github.com/shiena/ansicolor
install: go install github.com/DATA-DOG/godog/cmd/godog

script:
  # run standard go tests
  - go vet ./...
  - go fmt ./...

@@ -17,4 +15,4 @@ script:
  - go test -race

  # run features
  - go run cmd/godog/main.go --format=progress --concurrency=4
  - godog --format=progress --concurrency=4
19  Makefile
@@ -1,15 +1,18 @@
.PHONY: test deps
.PHONY: test gherkin

test:
	@echo "running all tests"
	@go install ./...
	@go fmt ./...
	@golint ./...
	@golint github.com/DATA-DOG/godog
	@golint github.com/DATA-DOG/godog/cmd/godog
	go vet ./...
	go test
	go run cmd/godog/main.go -f progress -c 4

deps:
	@echo "updating all dependencies"
	go get -u gopkg.in/cucumber/gherkin-go.v3
	go get -u github.com/shiena/ansicolor
	godog -f progress -c 4

gherkin:
	@if [ -z "$(VERS)" ]; then echo "Provide gherkin version like: 'VERS=commit-hash'"; exit 1; fi
	@rm -rf gherkin
	@mkdir gherkin
	@curl -s -L https://github.com/cucumber/gherkin-go/tarball/$(VERS) | tar -C gherkin -zx --strip-components 1
	@rm -rf gherkin/{.travis.yml,.gitignore,*_test.go,gherkin-generate*,*.razor,*.jq,Makefile,CONTRIBUTING.md}
@@ -29,6 +29,10 @@ used in tests. **Godog** uses standard **go** ast and build utils to
generate test suite package and even builds it with **go test -c**
command. It even passes all your environment exported vars.

**Godog** ships gherkin parser dependency as a subpackage. This will
ensure that it is always compatible with the installed version of godog.
So in general there are no vendor dependencies needed for installation.

### Install

    go get github.com/DATA-DOG/godog/cmd/godog
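For orientation, a minimal sketch of what the subpackage change means for import paths; this is editor-added context rather than part of the committed README, and it assumes only the layout introduced by this commit:

    package godog

    // previously vendored from the upstream repository:
    //   import "gopkg.in/cucumber/gherkin-go.v3"
    // now bundled with godog itself:
    import "github.com/DATA-DOG/godog/gherkin"

    var _ = gherkin.ParseFeature // same parser API, pinned to the copy shipped with godog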
@@ -153,6 +157,10 @@ See implementation examples:

### Changes

**2016-05-26**
- pack gherkin dependency in a subpackage to prevent compatibility
  conflicts.

**2016-05-25**
- refactored test suite build tooling in order to use standard **go test**
  tool. Which allows to compile package with godog runner script in **go**
41  cmd/godog/ansicolor.go  (normal file)
@@ -0,0 +1,41 @@
// Copyright 2014 shiena Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

package main

import "io"

type outputMode int

// DiscardNonColorEscSeq supports the divided color escape sequence.
// But non-color escape sequence is not output.
// Please use the OutputNonColorEscSeq If you want to output a non-color
// escape sequences such as ncurses. However, it does not support the divided
// color escape sequence.
const (
	_ outputMode = iota
	discardNonColorEscSeq
	outputNonColorEscSeq
)

// NewAnsiColorWriter creates and initializes a new ansiColorWriter
// using io.Writer w as its initial contents.
// In the console of Windows, which change the foreground and background
// colors of the text by the escape sequence.
// In the console of other systems, which writes to w all text.
func createAnsiColorWriter(w io.Writer) io.Writer {
	return createModeAnsiColorWriter(w, discardNonColorEscSeq)
}

// NewModeAnsiColorWriter create and initializes a new ansiColorWriter
// by specifying the outputMode.
func createModeAnsiColorWriter(w io.Writer, mode outputMode) io.Writer {
	if _, ok := w.(*ansiColorWriter); !ok {
		return &ansiColorWriter{
			w:    w,
			mode: mode,
		}
	}
	return w
}
18  cmd/godog/ansicolor_ansi.go  (normal file)
@@ -0,0 +1,18 @@
// Copyright 2014 shiena Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

// +build !windows

package main

import "io"

type ansiColorWriter struct {
	w    io.Writer
	mode outputMode
}

func (cw *ansiColorWriter) Write(p []byte) (int, error) {
	return cw.w.Write(p)
}
417  cmd/godog/ansicolor_windows.go  (normal file)
@ -0,0 +1,417 @@
|
|||
// Copyright 2014 shiena Authors. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build windows
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"strings"
|
||||
"syscall"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
type csiState int
|
||||
|
||||
const (
|
||||
outsideCsiCode csiState = iota
|
||||
firstCsiCode
|
||||
secondCsiCode
|
||||
)
|
||||
|
||||
type parseResult int
|
||||
|
||||
const (
|
||||
noConsole parseResult = iota
|
||||
changedColor
|
||||
unknown
|
||||
)
|
||||
|
||||
type ansiColorWriter struct {
|
||||
w io.Writer
|
||||
mode outputMode
|
||||
state csiState
|
||||
paramStartBuf bytes.Buffer
|
||||
paramBuf bytes.Buffer
|
||||
}
|
||||
|
||||
const (
|
||||
firstCsiChar byte = '\x1b'
|
||||
secondeCsiChar byte = '['
|
||||
separatorChar byte = ';'
|
||||
sgrCode byte = 'm'
|
||||
)
|
||||
|
||||
const (
|
||||
foregroundBlue = uint16(0x0001)
|
||||
foregroundGreen = uint16(0x0002)
|
||||
foregroundRed = uint16(0x0004)
|
||||
foregroundIntensity = uint16(0x0008)
|
||||
backgroundBlue = uint16(0x0010)
|
||||
backgroundGreen = uint16(0x0020)
|
||||
backgroundRed = uint16(0x0040)
|
||||
backgroundIntensity = uint16(0x0080)
|
||||
underscore = uint16(0x8000)
|
||||
|
||||
foregroundMask = foregroundBlue | foregroundGreen | foregroundRed | foregroundIntensity
|
||||
backgroundMask = backgroundBlue | backgroundGreen | backgroundRed | backgroundIntensity
|
||||
)
|
||||
|
||||
const (
|
||||
ansiReset = "0"
|
||||
ansiIntensityOn = "1"
|
||||
ansiIntensityOff = "21"
|
||||
ansiUnderlineOn = "4"
|
||||
ansiUnderlineOff = "24"
|
||||
ansiBlinkOn = "5"
|
||||
ansiBlinkOff = "25"
|
||||
|
||||
ansiForegroundBlack = "30"
|
||||
ansiForegroundRed = "31"
|
||||
ansiForegroundGreen = "32"
|
||||
ansiForegroundYellow = "33"
|
||||
ansiForegroundBlue = "34"
|
||||
ansiForegroundMagenta = "35"
|
||||
ansiForegroundCyan = "36"
|
||||
ansiForegroundWhite = "37"
|
||||
ansiForegroundDefault = "39"
|
||||
|
||||
ansiBackgroundBlack = "40"
|
||||
ansiBackgroundRed = "41"
|
||||
ansiBackgroundGreen = "42"
|
||||
ansiBackgroundYellow = "43"
|
||||
ansiBackgroundBlue = "44"
|
||||
ansiBackgroundMagenta = "45"
|
||||
ansiBackgroundCyan = "46"
|
||||
ansiBackgroundWhite = "47"
|
||||
ansiBackgroundDefault = "49"
|
||||
|
||||
ansiLightForegroundGray = "90"
|
||||
ansiLightForegroundRed = "91"
|
||||
ansiLightForegroundGreen = "92"
|
||||
ansiLightForegroundYellow = "93"
|
||||
ansiLightForegroundBlue = "94"
|
||||
ansiLightForegroundMagenta = "95"
|
||||
ansiLightForegroundCyan = "96"
|
||||
ansiLightForegroundWhite = "97"
|
||||
|
||||
ansiLightBackgroundGray = "100"
|
||||
ansiLightBackgroundRed = "101"
|
||||
ansiLightBackgroundGreen = "102"
|
||||
ansiLightBackgroundYellow = "103"
|
||||
ansiLightBackgroundBlue = "104"
|
||||
ansiLightBackgroundMagenta = "105"
|
||||
ansiLightBackgroundCyan = "106"
|
||||
ansiLightBackgroundWhite = "107"
|
||||
)
|
||||
|
||||
type drawType int
|
||||
|
||||
const (
|
||||
foreground drawType = iota
|
||||
background
|
||||
)
|
||||
|
||||
type winColor struct {
|
||||
code uint16
|
||||
drawType drawType
|
||||
}
|
||||
|
||||
var colorMap = map[string]winColor{
|
||||
ansiForegroundBlack: {0, foreground},
|
||||
ansiForegroundRed: {foregroundRed, foreground},
|
||||
ansiForegroundGreen: {foregroundGreen, foreground},
|
||||
ansiForegroundYellow: {foregroundRed | foregroundGreen, foreground},
|
||||
ansiForegroundBlue: {foregroundBlue, foreground},
|
||||
ansiForegroundMagenta: {foregroundRed | foregroundBlue, foreground},
|
||||
ansiForegroundCyan: {foregroundGreen | foregroundBlue, foreground},
|
||||
ansiForegroundWhite: {foregroundRed | foregroundGreen | foregroundBlue, foreground},
|
||||
ansiForegroundDefault: {foregroundRed | foregroundGreen | foregroundBlue, foreground},
|
||||
|
||||
ansiBackgroundBlack: {0, background},
|
||||
ansiBackgroundRed: {backgroundRed, background},
|
||||
ansiBackgroundGreen: {backgroundGreen, background},
|
||||
ansiBackgroundYellow: {backgroundRed | backgroundGreen, background},
|
||||
ansiBackgroundBlue: {backgroundBlue, background},
|
||||
ansiBackgroundMagenta: {backgroundRed | backgroundBlue, background},
|
||||
ansiBackgroundCyan: {backgroundGreen | backgroundBlue, background},
|
||||
ansiBackgroundWhite: {backgroundRed | backgroundGreen | backgroundBlue, background},
|
||||
ansiBackgroundDefault: {0, background},
|
||||
|
||||
ansiLightForegroundGray: {foregroundIntensity, foreground},
|
||||
ansiLightForegroundRed: {foregroundIntensity | foregroundRed, foreground},
|
||||
ansiLightForegroundGreen: {foregroundIntensity | foregroundGreen, foreground},
|
||||
ansiLightForegroundYellow: {foregroundIntensity | foregroundRed | foregroundGreen, foreground},
|
||||
ansiLightForegroundBlue: {foregroundIntensity | foregroundBlue, foreground},
|
||||
ansiLightForegroundMagenta: {foregroundIntensity | foregroundRed | foregroundBlue, foreground},
|
||||
ansiLightForegroundCyan: {foregroundIntensity | foregroundGreen | foregroundBlue, foreground},
|
||||
ansiLightForegroundWhite: {foregroundIntensity | foregroundRed | foregroundGreen | foregroundBlue, foreground},
|
||||
|
||||
ansiLightBackgroundGray: {backgroundIntensity, background},
|
||||
ansiLightBackgroundRed: {backgroundIntensity | backgroundRed, background},
|
||||
ansiLightBackgroundGreen: {backgroundIntensity | backgroundGreen, background},
|
||||
ansiLightBackgroundYellow: {backgroundIntensity | backgroundRed | backgroundGreen, background},
|
||||
ansiLightBackgroundBlue: {backgroundIntensity | backgroundBlue, background},
|
||||
ansiLightBackgroundMagenta: {backgroundIntensity | backgroundRed | backgroundBlue, background},
|
||||
ansiLightBackgroundCyan: {backgroundIntensity | backgroundGreen | backgroundBlue, background},
|
||||
ansiLightBackgroundWhite: {backgroundIntensity | backgroundRed | backgroundGreen | backgroundBlue, background},
|
||||
}
|
||||
|
||||
var (
|
||||
kernel32 = syscall.NewLazyDLL("kernel32.dll")
|
||||
procSetConsoleTextAttribute = kernel32.NewProc("SetConsoleTextAttribute")
|
||||
procGetConsoleScreenBufferInfo = kernel32.NewProc("GetConsoleScreenBufferInfo")
|
||||
defaultAttr *textAttributes
|
||||
)
|
||||
|
||||
func init() {
|
||||
screenInfo := getConsoleScreenBufferInfo(uintptr(syscall.Stdout))
|
||||
if screenInfo != nil {
|
||||
colorMap[ansiForegroundDefault] = winColor{
|
||||
screenInfo.WAttributes & (foregroundRed | foregroundGreen | foregroundBlue),
|
||||
foreground,
|
||||
}
|
||||
colorMap[ansiBackgroundDefault] = winColor{
|
||||
screenInfo.WAttributes & (backgroundRed | backgroundGreen | backgroundBlue),
|
||||
background,
|
||||
}
|
||||
defaultAttr = convertTextAttr(screenInfo.WAttributes)
|
||||
}
|
||||
}
|
||||
|
||||
type coord struct {
|
||||
X, Y int16
|
||||
}
|
||||
|
||||
type smallRect struct {
|
||||
Left, Top, Right, Bottom int16
|
||||
}
|
||||
|
||||
type consoleScreenBufferInfo struct {
|
||||
DwSize coord
|
||||
DwCursorPosition coord
|
||||
WAttributes uint16
|
||||
SrWindow smallRect
|
||||
DwMaximumWindowSize coord
|
||||
}
|
||||
|
||||
func getConsoleScreenBufferInfo(hConsoleOutput uintptr) *consoleScreenBufferInfo {
|
||||
var csbi consoleScreenBufferInfo
|
||||
ret, _, _ := procGetConsoleScreenBufferInfo.Call(
|
||||
hConsoleOutput,
|
||||
uintptr(unsafe.Pointer(&csbi)))
|
||||
if ret == 0 {
|
||||
return nil
|
||||
}
|
||||
return &csbi
|
||||
}
|
||||
|
||||
func setConsoleTextAttribute(hConsoleOutput uintptr, wAttributes uint16) bool {
|
||||
ret, _, _ := procSetConsoleTextAttribute.Call(
|
||||
hConsoleOutput,
|
||||
uintptr(wAttributes))
|
||||
return ret != 0
|
||||
}
|
||||
|
||||
type textAttributes struct {
|
||||
foregroundColor uint16
|
||||
backgroundColor uint16
|
||||
foregroundIntensity uint16
|
||||
backgroundIntensity uint16
|
||||
underscore uint16
|
||||
otherAttributes uint16
|
||||
}
|
||||
|
||||
func convertTextAttr(winAttr uint16) *textAttributes {
|
||||
fgColor := winAttr & (foregroundRed | foregroundGreen | foregroundBlue)
|
||||
bgColor := winAttr & (backgroundRed | backgroundGreen | backgroundBlue)
|
||||
fgIntensity := winAttr & foregroundIntensity
|
||||
bgIntensity := winAttr & backgroundIntensity
|
||||
underline := winAttr & underscore
|
||||
otherAttributes := winAttr &^ (foregroundMask | backgroundMask | underscore)
|
||||
return &textAttributes{fgColor, bgColor, fgIntensity, bgIntensity, underline, otherAttributes}
|
||||
}
|
||||
|
||||
func convertWinAttr(textAttr *textAttributes) uint16 {
|
||||
var winAttr uint16
|
||||
winAttr |= textAttr.foregroundColor
|
||||
winAttr |= textAttr.backgroundColor
|
||||
winAttr |= textAttr.foregroundIntensity
|
||||
winAttr |= textAttr.backgroundIntensity
|
||||
winAttr |= textAttr.underscore
|
||||
winAttr |= textAttr.otherAttributes
|
||||
return winAttr
|
||||
}
|
||||
|
||||
func changeColor(param []byte) parseResult {
|
||||
screenInfo := getConsoleScreenBufferInfo(uintptr(syscall.Stdout))
|
||||
if screenInfo == nil {
|
||||
return noConsole
|
||||
}
|
||||
|
||||
winAttr := convertTextAttr(screenInfo.WAttributes)
|
||||
strParam := string(param)
|
||||
if len(strParam) <= 0 {
|
||||
strParam = "0"
|
||||
}
|
||||
csiParam := strings.Split(strParam, string(separatorChar))
|
||||
for _, p := range csiParam {
|
||||
c, ok := colorMap[p]
|
||||
switch {
|
||||
case !ok:
|
||||
switch p {
|
||||
case ansiReset:
|
||||
winAttr.foregroundColor = defaultAttr.foregroundColor
|
||||
winAttr.backgroundColor = defaultAttr.backgroundColor
|
||||
winAttr.foregroundIntensity = defaultAttr.foregroundIntensity
|
||||
winAttr.backgroundIntensity = defaultAttr.backgroundIntensity
|
||||
winAttr.underscore = 0
|
||||
winAttr.otherAttributes = 0
|
||||
case ansiIntensityOn:
|
||||
winAttr.foregroundIntensity = foregroundIntensity
|
||||
case ansiIntensityOff:
|
||||
winAttr.foregroundIntensity = 0
|
||||
case ansiUnderlineOn:
|
||||
winAttr.underscore = underscore
|
||||
case ansiUnderlineOff:
|
||||
winAttr.underscore = 0
|
||||
case ansiBlinkOn:
|
||||
winAttr.backgroundIntensity = backgroundIntensity
|
||||
case ansiBlinkOff:
|
||||
winAttr.backgroundIntensity = 0
|
||||
default:
|
||||
// unknown code
|
||||
}
|
||||
case c.drawType == foreground:
|
||||
winAttr.foregroundColor = c.code
|
||||
case c.drawType == background:
|
||||
winAttr.backgroundColor = c.code
|
||||
}
|
||||
}
|
||||
winTextAttribute := convertWinAttr(winAttr)
|
||||
setConsoleTextAttribute(uintptr(syscall.Stdout), winTextAttribute)
|
||||
|
||||
return changedColor
|
||||
}
|
||||
|
||||
func parseEscapeSequence(command byte, param []byte) parseResult {
|
||||
if defaultAttr == nil {
|
||||
return noConsole
|
||||
}
|
||||
|
||||
switch command {
|
||||
case sgrCode:
|
||||
return changeColor(param)
|
||||
default:
|
||||
return unknown
|
||||
}
|
||||
}
|
||||
|
||||
func (cw *ansiColorWriter) flushBuffer() (int, error) {
|
||||
return cw.flushTo(cw.w)
|
||||
}
|
||||
|
||||
func (cw *ansiColorWriter) resetBuffer() (int, error) {
|
||||
return cw.flushTo(nil)
|
||||
}
|
||||
|
||||
func (cw *ansiColorWriter) flushTo(w io.Writer) (int, error) {
|
||||
var n1, n2 int
|
||||
var err error
|
||||
|
||||
startBytes := cw.paramStartBuf.Bytes()
|
||||
cw.paramStartBuf.Reset()
|
||||
if w != nil {
|
||||
n1, err = cw.w.Write(startBytes)
|
||||
if err != nil {
|
||||
return n1, err
|
||||
}
|
||||
} else {
|
||||
n1 = len(startBytes)
|
||||
}
|
||||
paramBytes := cw.paramBuf.Bytes()
|
||||
cw.paramBuf.Reset()
|
||||
if w != nil {
|
||||
n2, err = cw.w.Write(paramBytes)
|
||||
if err != nil {
|
||||
return n1 + n2, err
|
||||
}
|
||||
} else {
|
||||
n2 = len(paramBytes)
|
||||
}
|
||||
return n1 + n2, nil
|
||||
}
|
||||
|
||||
func isParameterChar(b byte) bool {
|
||||
return ('0' <= b && b <= '9') || b == separatorChar
|
||||
}
|
||||
|
||||
func (cw *ansiColorWriter) Write(p []byte) (int, error) {
|
||||
r, nw, first, last := 0, 0, 0, 0
|
||||
if cw.mode != discardNonColorEscSeq {
|
||||
cw.state = outsideCsiCode
|
||||
cw.resetBuffer()
|
||||
}
|
||||
|
||||
var err error
|
||||
for i, ch := range p {
|
||||
switch cw.state {
|
||||
case outsideCsiCode:
|
||||
if ch == firstCsiChar {
|
||||
cw.paramStartBuf.WriteByte(ch)
|
||||
cw.state = firstCsiCode
|
||||
}
|
||||
case firstCsiCode:
|
||||
switch ch {
|
||||
case firstCsiChar:
|
||||
cw.paramStartBuf.WriteByte(ch)
|
||||
break
|
||||
case secondeCsiChar:
|
||||
cw.paramStartBuf.WriteByte(ch)
|
||||
cw.state = secondCsiCode
|
||||
last = i - 1
|
||||
default:
|
||||
cw.resetBuffer()
|
||||
cw.state = outsideCsiCode
|
||||
}
|
||||
case secondCsiCode:
|
||||
if isParameterChar(ch) {
|
||||
cw.paramBuf.WriteByte(ch)
|
||||
} else {
|
||||
nw, err = cw.w.Write(p[first:last])
|
||||
r += nw
|
||||
if err != nil {
|
||||
return r, err
|
||||
}
|
||||
first = i + 1
|
||||
result := parseEscapeSequence(ch, cw.paramBuf.Bytes())
|
||||
if result == noConsole || (cw.mode == outputNonColorEscSeq && result == unknown) {
|
||||
cw.paramBuf.WriteByte(ch)
|
||||
nw, err := cw.flushBuffer()
|
||||
if err != nil {
|
||||
return r, err
|
||||
}
|
||||
r += nw
|
||||
} else {
|
||||
n, _ := cw.resetBuffer()
|
||||
// Add one more to the size of the buffer for the last ch
|
||||
r += n + 1
|
||||
}
|
||||
|
||||
cw.state = outsideCsiCode
|
||||
}
|
||||
default:
|
||||
cw.state = outsideCsiCode
|
||||
}
|
||||
}
|
||||
|
||||
if cw.mode != discardNonColorEscSeq || cw.state == outsideCsiCode {
|
||||
nw, err = cw.w.Write(p[first:len(p)])
|
||||
r += nw
|
||||
}
|
||||
|
||||
return r, err
|
||||
}
|
@@ -12,7 +12,6 @@ import (
	"time"

	"github.com/DATA-DOG/godog"
	"github.com/shiena/ansicolor"
)

var statusMatch = regexp.MustCompile("^exit status (\\d+)")

@@ -21,8 +20,8 @@ var parsedStatus int
func buildAndRun() (int, error) {
	var status int
	// will support Ansi colors for windows
	stdout := ansicolor.NewAnsiColorWriter(os.Stdout)
	stderr := ansicolor.NewAnsiColorWriter(statusOutputFilter(os.Stderr))
	stdout := createAnsiColorWriter(os.Stdout)
	stderr := createAnsiColorWriter(statusOutputFilter(os.Stderr))

	dir := fmt.Sprintf(filepath.Join("%s", "godog-%d"), os.TempDir(), time.Now().UnixNano())
	err := godog.Build(dir)
2  fmt.go
@@ -10,7 +10,7 @@ import (
	"time"
	"unicode"

	"gopkg.in/cucumber/gherkin-go.v3"
	"github.com/DATA-DOG/godog/gherkin"
)

// some snippet formatting regexps
@@ -7,7 +7,7 @@ import (
	"os"
	"time"

	"gopkg.in/cucumber/gherkin-go.v3"
	"github.com/DATA-DOG/godog/gherkin"
)

func init() {
@@ -7,7 +7,7 @@ import (
	"strings"
	"time"

	"gopkg.in/cucumber/gherkin-go.v3"
	"github.com/DATA-DOG/godog/gherkin"
)

func init() {
@@ -6,7 +6,7 @@ import (
	"sync"
	"time"

	"gopkg.in/cucumber/gherkin-go.v3"
	"github.com/DATA-DOG/godog/gherkin"
)

func init() {
@@ -1,6 +1,6 @@
package godog

import "gopkg.in/cucumber/gherkin-go.v3"
import "github.com/DATA-DOG/godog/gherkin"

type testFormatter struct {
	basefmt
@@ -1,6 +1,6 @@
package godog

import "gopkg.in/cucumber/gherkin-go.v3"
import "github.com/DATA-DOG/godog/gherkin"

// examples is a helper func to cast gherkin.Examples
// or gherkin.BaseExamples if its empty
21  gherkin/LICENSE  (normal file)
@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2014-2016 Cucumber Ltd, Gaspar Nagy

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
3  gherkin/README.md  (normal file)
@@ -0,0 +1,3 @@
[](http://travis-ci.org/cucumber/gherkin-go)

Gherkin parser/compiler for Go. Please see [Gherkin](https://github.com/cucumber/gherkin) for details.
97  gherkin/ast.go  (normal file)
@@ -0,0 +1,97 @@
package gherkin

type Location struct {
	Line   int `json:"line"`
	Column int `json:"column"`
}

type Node struct {
	Location *Location `json:"location,omitempty"`
	Type     string    `json:"type"`
}

type Feature struct {
	Node
	Tags                []*Tag        `json:"tags"`
	Language            string        `json:"language,omitempty"`
	Keyword             string        `json:"keyword"`
	Name                string        `json:"name"`
	Description         string        `json:"description,omitempty"`
	Background          *Background   `json:"background,omitempty"`
	ScenarioDefinitions []interface{} `json:"scenarioDefinitions"`
	Comments            []*Comment    `json:"comments"`
}

type Comment struct {
	Node
	Location *Location `json:"location,omitempty"`
	Text     string    `json:"text"`
}

type Tag struct {
	Node
	Location *Location `json:"location,omitempty"`
	Name     string    `json:"name"`
}

type Background struct {
	ScenarioDefinition
}

type Scenario struct {
	ScenarioDefinition
	Tags []*Tag `json:"tags"`
}

type ScenarioOutline struct {
	ScenarioDefinition
	Tags     []*Tag      `json:"tags"`
	Examples []*Examples `json:"examples,omitempty"`
}

type Examples struct {
	Node
	Tags        []*Tag      `json:"tags"`
	Keyword     string      `json:"keyword"`
	Name        string      `json:"name"`
	Description string      `json:"description,omitempty"`
	TableHeader *TableRow   `json:"tableHeader"`
	TableBody   []*TableRow `json:"tableBody"`
}

type TableRow struct {
	Node
	Cells []*TableCell `json:"cells"`
}

type TableCell struct {
	Node
	Value string `json:"value"`
}

type ScenarioDefinition struct {
	Node
	Keyword     string  `json:"keyword"`
	Name        string  `json:"name"`
	Description string  `json:"description,omitempty"`
	Steps       []*Step `json:"steps"`
}

type Step struct {
	Node
	Keyword  string      `json:"keyword"`
	Text     string      `json:"text"`
	Argument interface{} `json:"argument,omitempty"`
}

type DocString struct {
	Node
	ContentType string `json:"contentType,omitempty"`
	Content     string `json:"content"`
	Delimitter  string `json:"-"`
}

type DataTable struct {
	Node
	Rows []*TableRow `json:"rows"`
}
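Since Feature.ScenarioDefinitions is declared as []interface{}, callers recover the concrete node type with a type switch. A minimal sketch of that pattern; the helper name and package below are illustrative and not part of the diff:

    package example

    import "github.com/DATA-DOG/godog/gherkin"

    // scenarioNames collects the names of plain scenarios and scenario outlines
    // from a parsed feature.
    func scenarioNames(feat *gherkin.Feature) []string {
    	var names []string
    	for _, def := range feat.ScenarioDefinitions {
    		switch t := def.(type) {
    		case *gherkin.Scenario:
    			names = append(names, t.Name)
    		case *gherkin.ScenarioOutline:
    			names = append(names, t.Name)
    		}
    	}
    	return names
    }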
378  gherkin/astbuilder.go  (normal file)
@ -0,0 +1,378 @@
|
|||
package gherkin
|
||||
|
||||
import (
|
||||
"strings"
|
||||
)
|
||||
|
||||
type AstBuilder interface {
|
||||
Builder
|
||||
GetFeature() *Feature
|
||||
}
|
||||
|
||||
type astBuilder struct {
|
||||
stack []*astNode
|
||||
comments []*Comment
|
||||
}
|
||||
|
||||
func (t *astBuilder) Reset() {
|
||||
t.comments = []*Comment{}
|
||||
t.stack = []*astNode{}
|
||||
t.push(newAstNode(RuleType_None))
|
||||
}
|
||||
|
||||
func (t *astBuilder) GetFeature() *Feature {
|
||||
res := t.currentNode().getSingle(RuleType_Feature)
|
||||
if val, ok := res.(*Feature); ok {
|
||||
return val
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type astNode struct {
|
||||
ruleType RuleType
|
||||
subNodes map[RuleType][]interface{}
|
||||
}
|
||||
|
||||
func (a *astNode) add(rt RuleType, obj interface{}) {
|
||||
a.subNodes[rt] = append(a.subNodes[rt], obj)
|
||||
}
|
||||
|
||||
func (a *astNode) getSingle(rt RuleType) interface{} {
|
||||
if val, ok := a.subNodes[rt]; ok {
|
||||
for i := range val {
|
||||
return val[i]
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (a *astNode) getItems(rt RuleType) []interface{} {
|
||||
var res []interface{}
|
||||
if val, ok := a.subNodes[rt]; ok {
|
||||
for i := range val {
|
||||
res = append(res, val[i])
|
||||
}
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
func (a *astNode) getToken(tt TokenType) *Token {
|
||||
if val, ok := a.getSingle(tt.RuleType()).(*Token); ok {
|
||||
return val
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (a *astNode) getTokens(tt TokenType) []*Token {
|
||||
var items = a.getItems(tt.RuleType())
|
||||
var tokens []*Token
|
||||
for i := range items {
|
||||
if val, ok := items[i].(*Token); ok {
|
||||
tokens = append(tokens, val)
|
||||
}
|
||||
}
|
||||
return tokens
|
||||
}
|
||||
|
||||
func (t *astBuilder) currentNode() *astNode {
|
||||
if len(t.stack) > 0 {
|
||||
return t.stack[len(t.stack)-1]
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func newAstNode(rt RuleType) *astNode {
|
||||
return &astNode{
|
||||
ruleType: rt,
|
||||
subNodes: make(map[RuleType][]interface{}),
|
||||
}
|
||||
}
|
||||
|
||||
func NewAstBuilder() AstBuilder {
|
||||
builder := new(astBuilder)
|
||||
builder.comments = []*Comment{}
|
||||
builder.push(newAstNode(RuleType_None))
|
||||
return builder
|
||||
}
|
||||
|
||||
func (t *astBuilder) push(n *astNode) {
|
||||
t.stack = append(t.stack, n)
|
||||
}
|
||||
|
||||
func (t *astBuilder) pop() *astNode {
|
||||
x := t.stack[len(t.stack)-1]
|
||||
t.stack = t.stack[:len(t.stack)-1]
|
||||
return x
|
||||
}
|
||||
|
||||
func (t *astBuilder) Build(tok *Token) (bool, error) {
|
||||
if tok.Type == TokenType_Comment {
|
||||
comment := new(Comment)
|
||||
comment.Type = "Comment"
|
||||
comment.Location = astLocation(tok)
|
||||
comment.Text = tok.Text
|
||||
t.comments = append(t.comments, comment)
|
||||
} else {
|
||||
t.currentNode().add(tok.Type.RuleType(), tok)
|
||||
}
|
||||
return true, nil
|
||||
}
|
||||
func (t *astBuilder) StartRule(r RuleType) (bool, error) {
|
||||
t.push(newAstNode(r))
|
||||
return true, nil
|
||||
}
|
||||
func (t *astBuilder) EndRule(r RuleType) (bool, error) {
|
||||
node := t.pop()
|
||||
transformedNode, err := t.transformNode(node)
|
||||
t.currentNode().add(node.ruleType, transformedNode)
|
||||
return true, err
|
||||
}
|
||||
|
||||
func (t *astBuilder) transformNode(node *astNode) (interface{}, error) {
|
||||
switch node.ruleType {
|
||||
|
||||
case RuleType_Step:
|
||||
stepLine := node.getToken(TokenType_StepLine)
|
||||
step := new(Step)
|
||||
step.Type = "Step"
|
||||
step.Location = astLocation(stepLine)
|
||||
step.Keyword = stepLine.Keyword
|
||||
step.Text = stepLine.Text
|
||||
step.Argument = node.getSingle(RuleType_DataTable)
|
||||
if step.Argument == nil {
|
||||
step.Argument = node.getSingle(RuleType_DocString)
|
||||
}
|
||||
return step, nil
|
||||
|
||||
case RuleType_DocString:
|
||||
separatorToken := node.getToken(TokenType_DocStringSeparator)
|
||||
contentType := separatorToken.Text
|
||||
lineTokens := node.getTokens(TokenType_Other)
|
||||
var text string
|
||||
for i := range lineTokens {
|
||||
if i > 0 {
|
||||
text += "\n"
|
||||
}
|
||||
text += lineTokens[i].Text
|
||||
}
|
||||
ds := new(DocString)
|
||||
ds.Type = "DocString"
|
||||
ds.Location = astLocation(separatorToken)
|
||||
ds.ContentType = contentType
|
||||
ds.Content = text
|
||||
ds.Delimitter = DOCSTRING_SEPARATOR // TODO: remember separator
|
||||
return ds, nil
|
||||
|
||||
case RuleType_DataTable:
|
||||
rows, err := astTableRows(node)
|
||||
dt := new(DataTable)
|
||||
dt.Type = "DataTable"
|
||||
dt.Location = rows[0].Location
|
||||
dt.Rows = rows
|
||||
return dt, err
|
||||
|
||||
case RuleType_Background:
|
||||
backgroundLine := node.getToken(TokenType_BackgroundLine)
|
||||
description, _ := node.getSingle(RuleType_Description).(string)
|
||||
bg := new(Background)
|
||||
bg.Type = "Background"
|
||||
bg.Location = astLocation(backgroundLine)
|
||||
bg.Keyword = backgroundLine.Keyword
|
||||
bg.Name = backgroundLine.Text
|
||||
bg.Description = description
|
||||
bg.Steps = astSteps(node)
|
||||
return bg, nil
|
||||
|
||||
case RuleType_Scenario_Definition:
|
||||
tags := astTags(node)
|
||||
scenarioNode, _ := node.getSingle(RuleType_Scenario).(*astNode)
|
||||
if scenarioNode != nil {
|
||||
scenarioLine := scenarioNode.getToken(TokenType_ScenarioLine)
|
||||
description, _ := scenarioNode.getSingle(RuleType_Description).(string)
|
||||
sc := new(Scenario)
|
||||
sc.Type = "Scenario"
|
||||
sc.Tags = tags
|
||||
sc.Location = astLocation(scenarioLine)
|
||||
sc.Keyword = scenarioLine.Keyword
|
||||
sc.Name = scenarioLine.Text
|
||||
sc.Description = description
|
||||
sc.Steps = astSteps(scenarioNode)
|
||||
return sc, nil
|
||||
} else {
|
||||
scenarioOutlineNode, ok := node.getSingle(RuleType_ScenarioOutline).(*astNode)
|
||||
if !ok {
|
||||
panic("Internal grammar error")
|
||||
}
|
||||
scenarioOutlineLine := scenarioOutlineNode.getToken(TokenType_ScenarioOutlineLine)
|
||||
description, _ := scenarioOutlineNode.getSingle(RuleType_Description).(string)
|
||||
sc := new(ScenarioOutline)
|
||||
sc.Type = "ScenarioOutline"
|
||||
sc.Tags = tags
|
||||
sc.Location = astLocation(scenarioOutlineLine)
|
||||
sc.Keyword = scenarioOutlineLine.Keyword
|
||||
sc.Name = scenarioOutlineLine.Text
|
||||
sc.Description = description
|
||||
sc.Steps = astSteps(scenarioOutlineNode)
|
||||
sc.Examples = astExamples(scenarioOutlineNode)
|
||||
return sc, nil
|
||||
}
|
||||
|
||||
case RuleType_Examples_Definition:
|
||||
tags := astTags(node)
|
||||
examplesNode, _ := node.getSingle(RuleType_Examples).(*astNode)
|
||||
examplesLine := examplesNode.getToken(TokenType_ExamplesLine)
|
||||
description, _ := examplesNode.getSingle(RuleType_Description).(string)
|
||||
allRows, err := astTableRows(examplesNode)
|
||||
ex := new(Examples)
|
||||
ex.Type = "Examples"
|
||||
ex.Tags = tags
|
||||
ex.Location = astLocation(examplesLine)
|
||||
ex.Keyword = examplesLine.Keyword
|
||||
ex.Name = examplesLine.Text
|
||||
ex.Description = description
|
||||
ex.TableHeader = allRows[0]
|
||||
ex.TableBody = allRows[1:]
|
||||
return ex, err
|
||||
|
||||
case RuleType_Description:
|
||||
lineTokens := node.getTokens(TokenType_Other)
|
||||
// Trim trailing empty lines
|
||||
end := len(lineTokens)
|
||||
for end > 0 && strings.TrimSpace(lineTokens[end-1].Text) == "" {
|
||||
end--
|
||||
}
|
||||
var desc []string
|
||||
for i := range lineTokens[0:end] {
|
||||
desc = append(desc, lineTokens[i].Text)
|
||||
}
|
||||
return strings.Join(desc, "\n"), nil
|
||||
|
||||
case RuleType_Feature:
|
||||
header, ok := node.getSingle(RuleType_Feature_Header).(*astNode)
|
||||
if !ok {
|
||||
return nil, nil
|
||||
}
|
||||
tags := astTags(header)
|
||||
featureLine := header.getToken(TokenType_FeatureLine)
|
||||
if featureLine == nil {
|
||||
return nil, nil
|
||||
}
|
||||
background, _ := node.getSingle(RuleType_Background).(*Background)
|
||||
scenarioDefinitions := node.getItems(RuleType_Scenario_Definition)
|
||||
if scenarioDefinitions == nil {
|
||||
scenarioDefinitions = []interface{}{}
|
||||
}
|
||||
description, _ := header.getSingle(RuleType_Description).(string)
|
||||
|
||||
feat := new(Feature)
|
||||
feat.Type = "Feature"
|
||||
feat.Tags = tags
|
||||
feat.Location = astLocation(featureLine)
|
||||
feat.Language = featureLine.GherkinDialect
|
||||
feat.Keyword = featureLine.Keyword
|
||||
feat.Name = featureLine.Text
|
||||
feat.Description = description
|
||||
feat.Background = background
|
||||
feat.ScenarioDefinitions = scenarioDefinitions
|
||||
feat.Comments = t.comments
|
||||
return feat, nil
|
||||
}
|
||||
return node, nil
|
||||
}
|
||||
|
||||
func astLocation(t *Token) *Location {
|
||||
return &Location{
|
||||
Line: t.Location.Line,
|
||||
Column: t.Location.Column,
|
||||
}
|
||||
}
|
||||
|
||||
func astTableRows(t *astNode) (rows []*TableRow, err error) {
|
||||
rows = []*TableRow{}
|
||||
tokens := t.getTokens(TokenType_TableRow)
|
||||
for i := range tokens {
|
||||
row := new(TableRow)
|
||||
row.Type = "TableRow"
|
||||
row.Location = astLocation(tokens[i])
|
||||
row.Cells = astTableCells(tokens[i])
|
||||
rows = append(rows, row)
|
||||
}
|
||||
err = ensureCellCount(rows)
|
||||
return
|
||||
}
|
||||
|
||||
func ensureCellCount(rows []*TableRow) error {
|
||||
if len(rows) <= 1 {
|
||||
return nil
|
||||
}
|
||||
cellCount := len(rows[0].Cells)
|
||||
for i := range rows {
|
||||
if cellCount != len(rows[i].Cells) {
|
||||
return &parseError{"inconsistent cell count within the table", &Location{
|
||||
Line: rows[i].Location.Line,
|
||||
Column: rows[i].Location.Column,
|
||||
}}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func astTableCells(t *Token) (cells []*TableCell) {
|
||||
cells = []*TableCell{}
|
||||
for i := range t.Items {
|
||||
item := t.Items[i]
|
||||
cell := new(TableCell)
|
||||
cell.Type = "TableCell"
|
||||
cell.Location = &Location{
|
||||
Line: t.Location.Line,
|
||||
Column: item.Column,
|
||||
}
|
||||
cell.Value = item.Text
|
||||
cells = append(cells, cell)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func astSteps(t *astNode) (steps []*Step) {
|
||||
steps = []*Step{}
|
||||
tokens := t.getItems(RuleType_Step)
|
||||
for i := range tokens {
|
||||
step, _ := tokens[i].(*Step)
|
||||
steps = append(steps, step)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func astExamples(t *astNode) (examples []*Examples) {
|
||||
examples = []*Examples{}
|
||||
tokens := t.getItems(RuleType_Examples_Definition)
|
||||
for i := range tokens {
|
||||
example, _ := tokens[i].(*Examples)
|
||||
examples = append(examples, example)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func astTags(node *astNode) (tags []*Tag) {
|
||||
tags = []*Tag{}
|
||||
tagsNode, ok := node.getSingle(RuleType_Tags).(*astNode)
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
tokens := tagsNode.getTokens(TokenType_TagLine)
|
||||
for i := range tokens {
|
||||
token := tokens[i]
|
||||
for k := range token.Items {
|
||||
item := token.Items[k]
|
||||
tag := new(Tag)
|
||||
tag.Type = "Tag"
|
||||
tag.Location = &Location{
|
||||
Line: token.Location.Line,
|
||||
Column: item.Column,
|
||||
}
|
||||
tag.Name = item.Text
|
||||
tags = append(tags, tag)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
47  gherkin/dialect.go  (normal file)
@@ -0,0 +1,47 @@
package gherkin

type GherkinDialect struct {
	Language string
	Name     string
	Native   string
	Keywords map[string][]string
}

func (g *GherkinDialect) FeatureKeywords() []string {
	return g.Keywords["feature"]
}

func (g *GherkinDialect) ScenarioKeywords() []string {
	return g.Keywords["scenario"]
}

func (g *GherkinDialect) StepKeywords() []string {
	result := g.Keywords["given"]
	result = append(result, g.Keywords["when"]...)
	result = append(result, g.Keywords["then"]...)
	result = append(result, g.Keywords["and"]...)
	result = append(result, g.Keywords["but"]...)
	return result
}

func (g *GherkinDialect) BackgroundKeywords() []string {
	return g.Keywords["background"]
}

func (g *GherkinDialect) ScenarioOutlineKeywords() []string {
	return g.Keywords["scenarioOutline"]
}

func (g *GherkinDialect) ExamplesKeywords() []string {
	return g.Keywords["examples"]
}

type GherkinDialectProvider interface {
	GetDialect(language string) *GherkinDialect
}

type gherkinDialectMap map[string]*GherkinDialect

func (g gherkinDialectMap) GetDialect(language string) *GherkinDialect {
	return g[language]
}
2988  gherkin/dialects_builtin.go  (normal file)
File diff suppressed because it is too large.
137  gherkin/gherkin.go  (normal file)
@@ -0,0 +1,137 @@
package gherkin

import (
	"bufio"
	"fmt"
	"io"
	"strings"
)

type Parser interface {
	StopAtFirstError(b bool)
	Parse(s Scanner, m Matcher) (err error)
}

/*
The scanner reads a gherkin doc (typically read from a .feature file) and creates a token for
each line. The tokens are passed to the parser, which outputs an AST (Abstract Syntax Tree).

If the scanner sees a # language header, it will reconfigure itself dynamically to look for
Gherkin keywords for the associated language. The keywords are defined in gherkin-languages.json.
*/
type Scanner interface {
	Scan() (line *Line, atEof bool, err error)
}

type Builder interface {
	Build(*Token) (bool, error)
	StartRule(RuleType) (bool, error)
	EndRule(RuleType) (bool, error)
	Reset()
}

type Token struct {
	Type           TokenType
	Keyword        string
	Text           string
	Items          []*LineSpan
	GherkinDialect string
	Indent         string
	Location       *Location
}

func (t *Token) IsEOF() bool {
	return t.Type == TokenType_EOF
}
func (t *Token) String() string {
	return fmt.Sprintf("%s: %s/%s", t.Type.Name(), t.Keyword, t.Text)
}

type LineSpan struct {
	Column int
	Text   string
}

func (l *LineSpan) String() string {
	return fmt.Sprintf("%d:%s", l.Column, l.Text)
}

type parser struct {
	builder          Builder
	stopAtFirstError bool
}

func NewParser(b Builder) Parser {
	return &parser{
		builder: b,
	}
}

func (p *parser) StopAtFirstError(b bool) {
	p.stopAtFirstError = b
}

func NewScanner(r io.Reader) Scanner {
	return &scanner{
		s:    bufio.NewScanner(r),
		line: 0,
	}
}

type scanner struct {
	s    *bufio.Scanner
	line int
}

func (t *scanner) Scan() (line *Line, atEof bool, err error) {
	scanning := t.s.Scan()
	if !scanning {
		err = t.s.Err()
		if err == nil {
			atEof = true
		}
	}
	if err == nil {
		t.line += 1
		str := t.s.Text()
		line = &Line{str, t.line, strings.TrimLeft(str, " \t"), atEof}
	}
	return
}

type Line struct {
	LineText        string
	LineNumber      int
	TrimmedLineText string
	AtEof           bool
}

func (g *Line) Indent() int {
	return len(g.LineText) - len(g.TrimmedLineText)
}

func (g *Line) IsEmpty() bool {
	return len(g.TrimmedLineText) == 0
}

func (g *Line) IsEof() bool {
	return g.AtEof
}

func (g *Line) StartsWith(prefix string) bool {
	return strings.HasPrefix(g.TrimmedLineText, prefix)
}

func ParseFeature(in io.Reader) (feature *Feature, err error) {

	builder := NewAstBuilder()
	parser := NewParser(builder)
	parser.StopAtFirstError(false)
	matcher := NewMatcher(GherkinDialectsBuildin())

	scanner := NewScanner(in)

	err = parser.Parse(scanner, matcher)

	return builder.GetFeature(), err
}
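ParseFeature wires the scanner, matcher and AST builder together, so parsing a feature file only requires an io.Reader. A minimal usage sketch based on the API shown above; the feature file name is a placeholder:

    package main

    import (
    	"fmt"
    	"os"

    	"github.com/DATA-DOG/godog/gherkin"
    )

    func main() {
    	file, err := os.Open("example.feature") // placeholder path
    	if err != nil {
    		panic(err)
    	}
    	defer file.Close()

    	feature, err := gherkin.ParseFeature(file)
    	if err != nil {
    		panic(err)
    	}
    	fmt.Printf("%s: %d scenario definitions\n", feature.Name, len(feature.ScenarioDefinitions))
    }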
270  gherkin/matcher.go  (normal file)
@ -0,0 +1,270 @@
|
|||
package gherkin
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
const (
|
||||
DEFAULT_DIALECT = "en"
|
||||
COMMENT_PREFIX = "#"
|
||||
TAG_PREFIX = "@"
|
||||
TITLE_KEYWORD_SEPARATOR = ":"
|
||||
TABLE_CELL_SEPARATOR = '|'
|
||||
ESCAPE_CHAR = '\\'
|
||||
ESCAPED_NEWLINE = 'n'
|
||||
DOCSTRING_SEPARATOR = "\"\"\""
|
||||
DOCSTRING_ALTERNATIVE_SEPARATOR = "```"
|
||||
)
|
||||
|
||||
type matcher struct {
|
||||
gdp GherkinDialectProvider
|
||||
default_lang string
|
||||
lang string
|
||||
dialect *GherkinDialect
|
||||
activeDocStringSeparator string
|
||||
indentToRemove int
|
||||
languagePattern *regexp.Regexp
|
||||
}
|
||||
|
||||
func NewMatcher(gdp GherkinDialectProvider) Matcher {
|
||||
return &matcher{
|
||||
gdp: gdp,
|
||||
default_lang: DEFAULT_DIALECT,
|
||||
lang: DEFAULT_DIALECT,
|
||||
dialect: gdp.GetDialect(DEFAULT_DIALECT),
|
||||
languagePattern: regexp.MustCompile("^\\s*#\\s*language\\s*:\\s*([a-zA-Z\\-_]+)\\s*$"),
|
||||
}
|
||||
}
|
||||
|
||||
func NewLanguageMatcher(gdp GherkinDialectProvider, language string) Matcher {
|
||||
return &matcher{
|
||||
gdp: gdp,
|
||||
default_lang: language,
|
||||
lang: language,
|
||||
dialect: gdp.GetDialect(language),
|
||||
languagePattern: regexp.MustCompile("^\\s*#\\s*language\\s*:\\s*([a-zA-Z\\-_]+)\\s*$"),
|
||||
}
|
||||
}
|
||||
|
||||
func (m *matcher) Reset() {
|
||||
m.indentToRemove = 0
|
||||
m.activeDocStringSeparator = ""
|
||||
if m.lang != "en" {
|
||||
m.dialect = m.gdp.GetDialect(m.default_lang)
|
||||
m.lang = "en"
|
||||
}
|
||||
}
|
||||
|
||||
func (m *matcher) newTokenAtLocation(line, index int) (token *Token) {
|
||||
column := index + 1
|
||||
token = new(Token)
|
||||
token.GherkinDialect = m.lang
|
||||
token.Location = &Location{line, column}
|
||||
return
|
||||
}
|
||||
|
||||
func (m *matcher) MatchEOF(line *Line) (ok bool, token *Token, err error) {
|
||||
if line.IsEof() {
|
||||
token, ok = m.newTokenAtLocation(line.LineNumber, line.Indent()), true
|
||||
token.Type = TokenType_EOF
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (m *matcher) MatchEmpty(line *Line) (ok bool, token *Token, err error) {
|
||||
if line.IsEmpty() {
|
||||
token, ok = m.newTokenAtLocation(line.LineNumber, line.Indent()), true
|
||||
token.Type = TokenType_Empty
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (m *matcher) MatchComment(line *Line) (ok bool, token *Token, err error) {
|
||||
if line.StartsWith(COMMENT_PREFIX) {
|
||||
token, ok = m.newTokenAtLocation(line.LineNumber, 0), true
|
||||
token.Type = TokenType_Comment
|
||||
token.Text = line.LineText
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (m *matcher) MatchTagLine(line *Line) (ok bool, token *Token, err error) {
|
||||
if line.StartsWith(TAG_PREFIX) {
|
||||
var tags []*LineSpan
|
||||
var column = line.Indent()
|
||||
splits := strings.Split(line.TrimmedLineText, TAG_PREFIX)
|
||||
for i := range splits {
|
||||
txt := strings.Trim(splits[i], " ")
|
||||
if txt != "" {
|
||||
tags = append(tags, &LineSpan{column, TAG_PREFIX + txt})
|
||||
}
|
||||
column = column + len(splits[i]) + 1
|
||||
}
|
||||
|
||||
token, ok = m.newTokenAtLocation(line.LineNumber, line.Indent()), true
|
||||
token.Type = TokenType_TagLine
|
||||
token.Items = tags
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (m *matcher) matchTitleLine(line *Line, tokenType TokenType, keywords []string) (ok bool, token *Token, err error) {
|
||||
for i := range keywords {
|
||||
keyword := keywords[i]
|
||||
if line.StartsWith(keyword + TITLE_KEYWORD_SEPARATOR) {
|
||||
token, ok = m.newTokenAtLocation(line.LineNumber, line.Indent()), true
|
||||
token.Type = tokenType
|
||||
token.Keyword = keyword
|
||||
token.Text = strings.Trim(line.TrimmedLineText[len(keyword)+1:], " ")
|
||||
return
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (m *matcher) MatchFeatureLine(line *Line) (ok bool, token *Token, err error) {
|
||||
return m.matchTitleLine(line, TokenType_FeatureLine, m.dialect.FeatureKeywords())
|
||||
}
|
||||
func (m *matcher) MatchBackgroundLine(line *Line) (ok bool, token *Token, err error) {
|
||||
return m.matchTitleLine(line, TokenType_BackgroundLine, m.dialect.BackgroundKeywords())
|
||||
}
|
||||
func (m *matcher) MatchScenarioLine(line *Line) (ok bool, token *Token, err error) {
|
||||
return m.matchTitleLine(line, TokenType_ScenarioLine, m.dialect.ScenarioKeywords())
|
||||
}
|
||||
func (m *matcher) MatchScenarioOutlineLine(line *Line) (ok bool, token *Token, err error) {
|
||||
return m.matchTitleLine(line, TokenType_ScenarioOutlineLine, m.dialect.ScenarioOutlineKeywords())
|
||||
}
|
||||
func (m *matcher) MatchExamplesLine(line *Line) (ok bool, token *Token, err error) {
|
||||
return m.matchTitleLine(line, TokenType_ExamplesLine, m.dialect.ExamplesKeywords())
|
||||
}
|
||||
func (m *matcher) MatchStepLine(line *Line) (ok bool, token *Token, err error) {
|
||||
keywords := m.dialect.StepKeywords()
|
||||
for i := range keywords {
|
||||
keyword := keywords[i]
|
||||
if line.StartsWith(keyword) {
|
||||
token, ok = m.newTokenAtLocation(line.LineNumber, line.Indent()), true
|
||||
token.Type = TokenType_StepLine
|
||||
token.Keyword = keyword
|
||||
token.Text = strings.Trim(line.TrimmedLineText[len(keyword):], " ")
|
||||
return
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (m *matcher) MatchDocStringSeparator(line *Line) (ok bool, token *Token, err error) {
|
||||
if m.activeDocStringSeparator != "" {
|
||||
if line.StartsWith(m.activeDocStringSeparator) {
|
||||
// close
|
||||
token, ok = m.newTokenAtLocation(line.LineNumber, line.Indent()), true
|
||||
token.Type = TokenType_DocStringSeparator
|
||||
|
||||
m.indentToRemove = 0
|
||||
m.activeDocStringSeparator = ""
|
||||
}
|
||||
return
|
||||
}
|
||||
if line.StartsWith(DOCSTRING_SEPARATOR) {
|
||||
m.activeDocStringSeparator = DOCSTRING_SEPARATOR
|
||||
} else if line.StartsWith(DOCSTRING_ALTERNATIVE_SEPARATOR) {
|
||||
m.activeDocStringSeparator = DOCSTRING_ALTERNATIVE_SEPARATOR
|
||||
}
|
||||
if m.activeDocStringSeparator != "" {
|
||||
// open
|
||||
contentType := line.TrimmedLineText[len(m.activeDocStringSeparator):]
|
||||
m.indentToRemove = line.Indent()
|
||||
token, ok = m.newTokenAtLocation(line.LineNumber, line.Indent()), true
|
||||
token.Type = TokenType_DocStringSeparator
|
||||
token.Text = contentType
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (m *matcher) MatchTableRow(line *Line) (ok bool, token *Token, err error) {
|
||||
var firstChar, firstPos = utf8.DecodeRuneInString(line.TrimmedLineText)
|
||||
if firstChar == TABLE_CELL_SEPARATOR {
|
||||
var cells []*LineSpan
|
||||
var cell []rune
|
||||
var startCol = line.Indent() + 2 // column where the current cell started
|
||||
// start after the first separator, it's not included in the cell
|
||||
for i, w, col := firstPos, 0, startCol; i < len(line.TrimmedLineText); i += w {
|
||||
var char rune
|
||||
char, w = utf8.DecodeRuneInString(line.TrimmedLineText[i:])
|
||||
if char == TABLE_CELL_SEPARATOR {
|
||||
// append current cell
|
||||
txt := string(cell)
|
||||
txtTrimmed := strings.TrimLeft(txt, " ")
|
||||
ind := len(txt) - len(txtTrimmed)
|
||||
cells = append(cells, &LineSpan{startCol + ind, strings.TrimRight(txtTrimmed, " ")})
|
||||
// start building next
|
||||
cell = make([]rune, 0)
|
||||
startCol = col + 1
|
||||
} else if char == ESCAPE_CHAR {
|
||||
// skip this character but count the column
|
||||
i += w
|
||||
col++
|
||||
char, w = utf8.DecodeRuneInString(line.TrimmedLineText[i:])
|
||||
if char == ESCAPED_NEWLINE {
|
||||
cell = append(cell, '\n')
|
||||
} else {
|
||||
if char != TABLE_CELL_SEPARATOR && char != ESCAPE_CHAR {
|
||||
cell = append(cell, ESCAPE_CHAR)
|
||||
}
|
||||
cell = append(cell, char)
|
||||
}
|
||||
} else {
|
||||
cell = append(cell, char)
|
||||
}
|
||||
col++
|
||||
}
|
||||
|
||||
token, ok = m.newTokenAtLocation(line.LineNumber, line.Indent()), true
|
||||
token.Type = TokenType_TableRow
|
||||
token.Items = cells
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (m *matcher) MatchLanguage(line *Line) (ok bool, token *Token, err error) {
|
||||
matches := m.languagePattern.FindStringSubmatch(line.TrimmedLineText)
|
||||
if len(matches) > 0 {
|
||||
lang := matches[1]
|
||||
token, ok = m.newTokenAtLocation(line.LineNumber, line.Indent()), true
|
||||
token.Type = TokenType_Language
|
||||
token.Text = lang
|
||||
|
||||
dialect := m.gdp.GetDialect(lang)
|
||||
if dialect == nil {
|
||||
err = &parseError{"Language not supported: " + lang, token.Location}
|
||||
} else {
|
||||
m.lang = lang
|
||||
m.dialect = dialect
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (m *matcher) MatchOther(line *Line) (ok bool, token *Token, err error) {
|
||||
token, ok = m.newTokenAtLocation(line.LineNumber, 0), true
|
||||
token.Type = TokenType_Other
|
||||
|
||||
element := line.LineText
|
||||
txt := strings.TrimLeft(element, " ")
|
||||
|
||||
if len(element)-len(txt) > m.indentToRemove {
|
||||
token.Text = m.unescapeDocString(element[m.indentToRemove:])
|
||||
} else {
|
||||
token.Text = m.unescapeDocString(txt)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (m *matcher) unescapeDocString(text string) string {
|
||||
if m.activeDocStringSeparator != "" {
|
||||
return strings.Replace(text, "\\\"\\\"\\\"", "\"\"\"", -1)
|
||||
} else {
|
||||
return text
|
||||
}
|
||||
}
|
2270  gherkin/parser.go  (normal file)
File diff suppressed because it is too large.
|
@@ -7,7 +7,7 @@ import (
	"runtime"
	"strconv"

	"gopkg.in/cucumber/gherkin-go.v3"
	"github.com/DATA-DOG/godog/gherkin"
)

// StepDef is a registered step definition
|
2  suite.go
@@ -10,7 +10,7 @@ import (
	"strconv"
	"strings"

	"gopkg.in/cucumber/gherkin-go.v3"
	"github.com/DATA-DOG/godog/gherkin"
)

var errorInterface = reflect.TypeOf((*error)(nil)).Elem()
|
@@ -8,7 +8,7 @@ import (
	"strings"
	"testing"

	"gopkg.in/cucumber/gherkin-go.v3"
	"github.com/DATA-DOG/godog/gherkin"
)

func TestMain(m *testing.M) {