Add missing vendor dependencies
parent 7539f71aed · commit 5aec7517ca
@@ -22,7 +22,7 @@ func (m MockEntrypoint) GetNamespace() (namespace string) {

 func NewEntrypointInNamespace(namespace string) MockEntrypoint {
 	return MockEntrypoint{
-		client:    client,
+		client:    NewClient(),
 		namespace: namespace,
 	}
 }
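For context, a minimal sketch of how the touched code might read after this hunk is applied; the field types, `Client`, and `NewClient` are assumptions inferred from the diff, not the actual source file.

```go
package mock

// Client and NewClient are stand-ins; the real definitions live elsewhere in the package.
type Client struct{}

func NewClient() Client { return Client{} }

// MockEntrypoint mirrors the struct this hunk touches (field types assumed).
type MockEntrypoint struct {
	client    Client
	namespace string
}

func (m MockEntrypoint) GetNamespace() (namespace string) {
	return m.namespace
}

// After this commit the constructor builds its own client via NewClient()
// instead of referencing a shared `client` value.
func NewEntrypointInNamespace(namespace string) MockEntrypoint {
	return MockEntrypoint{
		client:    NewClient(),
		namespace: namespace,
	}
}
```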
vendor/github.com/onsi/ginkgo/CHANGELOG.md (new file, 137 lines, generated, vendored)
@@ -0,0 +1,137 @@
|
|||||||
|
## HEAD
|
||||||
|
|
||||||
|
Improvements:
|
||||||
|
|
||||||
|
- `Skip(message)` can be used to skip the current test (see the sketch after this list).
|
||||||
|
- Added `extensions/table` - a Ginkgo DSL for [Table Driven Tests](http://onsi.github.io/ginkgo/#table-driven-tests)
|
||||||
|
- Add `GinkgoRandomSeed()` - shorthand for `config.GinkgoConfig.RandomSeed`
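A brief illustrative sketch (not part of the vendored changelog) of the `Skip` and `GinkgoRandomSeed()` additions listed above; the `FEATURE_X` environment variable is only an example:

```go
package mypackage_test

import (
	"os"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
)

var _ = Describe("new helpers in HEAD", func() {
	It("skips itself when a precondition is missing", func() {
		if os.Getenv("FEATURE_X") == "" {
			// Skip(message) marks the current test as skipped and halts it.
			Skip("FEATURE_X is not set in this environment")
		}
		Expect(os.Getenv("FEATURE_X")).NotTo(BeEmpty())
	})

	It("can read the suite's random seed", func() {
		// GinkgoRandomSeed() is shorthand for config.GinkgoConfig.RandomSeed.
		Expect(GinkgoRandomSeed()).NotTo(BeZero())
	})
})
```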
|
||||||
|
|
||||||
|
Bug Fixes:
|
||||||
|
|
||||||
|
- Ginkgo tests now fail when you `panic(nil)` (#167)
|
||||||
|
|
||||||
|
## 1.2.0 (5/31/2015)
|
||||||
|
|
||||||
|
Improvements
|
||||||
|
|
||||||
|
- `ginkgo -coverpkg` calls down to `go test -coverpkg` (#160)
|
||||||
|
- `ginkgo -afterSuiteHook COMMAND` invokes the passed-in `COMMAND` after a test suite completes (#152)
|
||||||
|
- Relaxed requirement for Go 1.4+. `ginkgo` now works with Go v1.3+ (#166)
|
||||||
|
|
||||||
|
## 1.2.0-beta
|
||||||
|
|
||||||
|
Ginkgo now requires Go 1.4+
|
||||||
|
|
||||||
|
Improvements:
|
||||||
|
|
||||||
|
- Call reporters in reverse order when announcing spec completion -- allows custom reporters to emit output before the default reporter does.
|
||||||
|
- Improved focus behavior. Now, this:
|
||||||
|
|
||||||
|
```golang
|
||||||
|
FDescribe("Some describe", func() {
|
||||||
|
It("A", func() {})
|
||||||
|
|
||||||
|
FIt("B", func() {})
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
will run `B` but *not* `A`. This tends to be a common usage pattern when in the thick of writing and debugging tests.
|
||||||
|
- When `SIGINT` is received, Ginkgo will emit the contents of the `GinkgoWriter` before running the `AfterSuite`. Useful for debugging stuck tests.
|
||||||
|
- When `--progress` is set, Ginkgo will write test progress (in particular, Ginkgo will say when it is about to run a BeforeEach, AfterEach, It, etc...) to the `GinkgoWriter`. This is useful for debugging stuck tests and tests that generate many logs.
|
||||||
|
- Improved output when an error occurs in a setup or teardown block.
|
||||||
|
- When `--dryRun` is set, Ginkgo will walk the spec tree and emit to its reporter *without* actually running anything. Best paired with `-v` to understand which specs will run in which order.
|
||||||
|
- Add `By` to help document long `It`s. `By` simply writes to the `GinkgoWriter` (see the sketch after this list).
|
||||||
|
- Add support for precompiled tests:
|
||||||
|
- `ginkgo build <path-to-package>` will now compile the package, producing a file named `package.test`
|
||||||
|
- The compiled `package.test` file can be run directly. This runs the tests in series.
|
||||||
|
- To run precompiled tests in parallel, you can run: `ginkgo -p package.test`
|
||||||
|
- Support `bootstrap`ping and `generate`ing [Agouti](http://agouti.org) specs.
|
||||||
|
- `ginkgo generate` and `ginkgo bootstrap` now honor the package name already defined in a given directory
|
||||||
|
- The `ginkgo` CLI ignores `SIGQUIT`. Prevents its stack dump from interlacing with the underlying test suite's stack dump.
|
||||||
|
- The `ginkgo` CLI now compiles tests into a temporary directory instead of the package directory. This necessitates upgrading to Go v1.4+.
|
||||||
|
- `ginkgo -notify` now works on Linux
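As referenced in the `By` bullet above, a short illustrative sketch (not part of the vendored changelog) of documenting a long `It` with `By`:

```go
package registration_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
)

var _ = Describe("user registration", func() {
	It("walks through a multi-step flow", func() {
		By("creating the user record")
		user := map[string]string{"name": "example"} // placeholder data, for illustration only

		By("checking the record")
		Expect(user["name"]).To(Equal("example"))
	})
})
```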
|
||||||
|
|
||||||
|
Bug Fixes:
|
||||||
|
|
||||||
|
- If --skipPackages is used and all packages are skipped, Ginkgo should exit 0.
|
||||||
|
- Fix tempfile leak when running in parallel
|
||||||
|
- Fix incorrect failure message when a panic occurs during a parallel test run
|
||||||
|
- Fixed an issue where a pending test within a focused context (or a focused test within a pending context) would skip all other tests.
|
||||||
|
- Be more consistent about handling SIGTERM as well as SIGINT
|
||||||
|
- When interrupted while concurrently compiling test suites in the background, Ginkgo now cleans up the compiled artifacts.
|
||||||
|
- Fixed a long standing bug where `ginkgo -p` would hang if a process spawned by one of the Ginkgo parallel nodes does not exit. (Hooray!)
|
||||||
|
|
||||||
|
## 1.1.0 (8/2/2014)
|
||||||
|
|
||||||
|
No changes, just dropping the beta.
|
||||||
|
|
||||||
|
## 1.1.0-beta (7/22/2014)
|
||||||
|
New Features:
|
||||||
|
|
||||||
|
- `ginkgo watch` now monitors packages *and their dependencies* for changes. The depth of the dependency tree can be modified with the `-depth` flag.
|
||||||
|
- Test suites with a programmatic focus (`FIt`, `FDescribe`, etc...) exit with non-zero status code, even when they pass. This allows CI systems to detect accidental commits of focused test suites.
|
||||||
|
- `ginkgo -p` runs the testsuite in parallel with an auto-detected number of nodes.
|
||||||
|
- `ginkgo -tags=TAG_LIST` passes a list of tags down to the `go build` command.
|
||||||
|
- `ginkgo --failFast` aborts the test suite after the first failure.
|
||||||
|
- `ginkgo generate file_1 file_2` can take multiple file arguments.
|
||||||
|
- At the end of the test run, Ginkgo now summarizes any spec failures that occurred.
|
||||||
|
- `ginkgo --randomizeSuites` will run tests *suites* in random order using the generated/passed-in seed.
|
||||||
|
|
||||||
|
Improvements:
|
||||||
|
|
||||||
|
- `ginkgo -skipPackage` now takes a comma-separated list of strings. If the *relative path* to a package matches one of the entries in the comma-separated list, that package is skipped.
|
||||||
|
- `ginkgo --untilItFails` no longer recompiles between attempts.
|
||||||
|
- Ginkgo now panics when a runnable node (`It`, `BeforeEach`, `JustBeforeEach`, `AfterEach`, `Measure`) is nested within another runnable node. This is always a mistake. Any test suites that panic because of this change should be fixed.
|
||||||
|
|
||||||
|
Bug Fixes:
|
||||||
|
|
||||||
|
- `ginkgo boostrap` and `ginkgo generate` no longer fail when dealing with `hyphen-separated-packages`.
|
||||||
|
- parallel specs are now better distributed across nodes - fixed a crashing bug where (for example) distributing 11 tests across 7 nodes would panic
|
||||||
|
|
||||||
|
## 1.0.0 (5/24/2014)
|
||||||
|
New Features:
|
||||||
|
|
||||||
|
- Add `GinkgoParallelNode()` - shorthand for `config.GinkgoConfig.ParallelNode`
|
||||||
|
|
||||||
|
Improvements:
|
||||||
|
|
||||||
|
- When compilation fails, the compilation output is rewritten to present a correct *relative* path. Allows ⌘-clicking in iTerm to open the file in your text editor.
|
||||||
|
- `--untilItFails` and `ginkgo watch` now generate new random seeds between test runs, unless a particular random seed is specified.
|
||||||
|
|
||||||
|
Bug Fixes:
|
||||||
|
|
||||||
|
- `-cover` now generates a correctly combined coverprofile when running in parallel with multiple `-node`s.
|
||||||
|
- Print out the contents of the `GinkgoWriter` when `BeforeSuite` or `AfterSuite` fail.
|
||||||
|
- Fix all remaining race conditions in Ginkgo's test suite.
|
||||||
|
|
||||||
|
## 1.0.0-beta (4/14/2014)
|
||||||
|
Breaking changes:
|
||||||
|
|
||||||
|
- `thirdparty/gomocktestreporter` is gone. Use `GinkgoT()` instead
|
||||||
|
- Modified the Reporter interface
|
||||||
|
- `watch` is now a subcommand, not a flag.
|
||||||
|
|
||||||
|
DSL changes:
|
||||||
|
|
||||||
|
- `BeforeSuite` and `AfterSuite` for setting up and tearing down test suites.
|
||||||
|
- `AfterSuite` is triggered on interrupt (`^C`) as well as exit.
|
||||||
|
- `SynchronizedBeforeSuite` and `SynchronizedAfterSuite` for setting up and tearing down singleton resources across parallel nodes.
|
||||||
|
|
||||||
|
CLI changes:
|
||||||
|
|
||||||
|
- `watch` is now a subcommand, not a flag
|
||||||
|
- `--nodot` flag can be passed to `ginkgo generate` and `ginkgo bootstrap` to avoid dot imports. This explicitly imports all exported identifiers in Ginkgo and Gomega. Refreshing this list can be done by running `ginkgo nodot`
|
||||||
|
- Additional arguments can be passed to specs. Pass them after the `--` separator
|
||||||
|
- `--skipPackage` flag takes a regexp and ignores any packages with package names passing said regexp.
|
||||||
|
- `--trace` flag prints out full stack traces when errors occur, not just the line at which the error occurs.
|
||||||
|
|
||||||
|
Misc:
|
||||||
|
|
||||||
|
- Start using semantic versioning
|
||||||
|
- Start maintaining changelog
|
||||||
|
|
||||||
|
Major refactor:
|
||||||
|
|
||||||
|
- Pull out Ginkgo's internal to `internal`
|
||||||
|
- Rename `example` everywhere to `spec`
|
||||||
|
- Much more!
|
vendor/github.com/onsi/ginkgo/LICENSE (new file, 20 lines, generated, vendored)
@@ -0,0 +1,20 @@
|
|||||||
|
Copyright (c) 2013-2014 Onsi Fakhouri
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining
|
||||||
|
a copy of this software and associated documentation files (the
|
||||||
|
"Software"), to deal in the Software without restriction, including
|
||||||
|
without limitation the rights to use, copy, modify, merge, publish,
|
||||||
|
distribute, sublicense, and/or sell copies of the Software, and to
|
||||||
|
permit persons to whom the Software is furnished to do so, subject to
|
||||||
|
the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be
|
||||||
|
included in all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||||
|
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||||
|
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||||
|
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||||
|
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||||
|
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||||
|
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
vendor/github.com/onsi/ginkgo/README.md (new file, 115 lines, generated, vendored)
@@ -0,0 +1,115 @@
|
|||||||
|
![Ginkgo: A Golang BDD Testing Framework](http://onsi.github.io/ginkgo/images/ginkgo.png)
|
||||||
|
|
||||||
|
[![Build Status](https://travis-ci.org/onsi/ginkgo.png)](https://travis-ci.org/onsi/ginkgo)
|
||||||
|
|
||||||
|
Jump to the [docs](http://onsi.github.io/ginkgo/) to learn more. To start rolling your Ginkgo tests *now* [keep reading](#set-me-up)!
|
||||||
|
|
||||||
|
To discuss Ginkgo and get updates, join the [google group](https://groups.google.com/d/forum/ginkgo-and-gomega).
|
||||||
|
|
||||||
|
## Feature List
|
||||||
|
|
||||||
|
- Ginkgo uses Go's `testing` package and can live alongside your existing `testing` tests. It's easy to [bootstrap](http://onsi.github.io/ginkgo/#bootstrapping-a-suite) and start writing your [first tests](http://onsi.github.io/ginkgo/#adding-specs-to-a-suite)
|
||||||
|
|
||||||
|
- Structure your BDD-style tests expressively:
|
||||||
|
- Nestable [`Describe` and `Context` container blocks](http://onsi.github.io/ginkgo/#organizing-specs-with-containers-describe-and-context)
|
||||||
|
- [`BeforeEach` and `AfterEach` blocks](http://onsi.github.io/ginkgo/#extracting-common-setup-beforeeach) for setup and teardown
|
||||||
|
- [`It` blocks](http://onsi.github.io/ginkgo/#individual-specs-) that hold your assertions
|
||||||
|
- [`JustBeforeEach` blocks](http://onsi.github.io/ginkgo/#separating-creation-and-configuration-justbeforeeach) that separate creation from configuration (also known as the subject action pattern).
|
||||||
|
- [`BeforeSuite` and `AfterSuite` blocks](http://onsi.github.io/ginkgo/#global-setup-and-teardown-beforesuite-and-aftersuite) to prep for and cleanup after a suite.
|
||||||
|
|
||||||
|
- A comprehensive test runner that lets you:
|
||||||
|
- Mark specs as [pending](http://onsi.github.io/ginkgo/#pending-specs)
|
||||||
|
- [Focus](http://onsi.github.io/ginkgo/#focused-specs) individual specs, and groups of specs, either programmatically or on the command line
|
||||||
|
- Run your tests in [random order](http://onsi.github.io/ginkgo/#spec-permutation), and then reuse random seeds to replicate the same order.
|
||||||
|
- Break up your test suite into parallel processes for straightforward [test parallelization](http://onsi.github.io/ginkgo/#parallel-specs)
|
||||||
|
|
||||||
|
- `ginkgo`: a command line interface with plenty of handy command line arguments for [running your tests](http://onsi.github.io/ginkgo/#running-tests) and [generating](http://onsi.github.io/ginkgo/#generators) test files. Here are a few choice examples:
|
||||||
|
- `ginkgo -nodes=N` runs your tests in `N` parallel processes and prints out coherent output in real time
|
||||||
|
- `ginkgo -cover` runs your tests using Golang's code coverage tool
|
||||||
|
- `ginkgo convert` converts an XUnit-style `testing` package to a Ginkgo-style package
|
||||||
|
- `ginkgo -focus="REGEXP"` and `ginkgo -skip="REGEXP"` allow you to specify a subset of tests to run via regular expression
|
||||||
|
- `ginkgo -r` runs all tests suites under the current directory
|
||||||
|
- `ginkgo -v` prints out identifying information for each test just before it runs
|
||||||
|
|
||||||
|
And much more: run `ginkgo help` for details!
|
||||||
|
|
||||||
|
The `ginkgo` CLI is convenient, but purely optional -- Ginkgo works just fine with `go test`
|
||||||
|
|
||||||
|
- `ginkgo watch` [watches](https://onsi.github.io/ginkgo/#watching-for-changes) packages *and their dependencies* for changes, then reruns tests. Run tests immediately as you develop!
|
||||||
|
|
||||||
|
- Built-in support for testing [asynchronicity](http://onsi.github.io/ginkgo/#asynchronous-tests)
|
||||||
|
|
||||||
|
- Built-in support for [benchmarking](http://onsi.github.io/ginkgo/#benchmark-tests) your code. Control the number of benchmark samples as you gather runtimes and other, arbitrary, bits of numerical information about your code.
|
||||||
|
|
||||||
|
- [Completions for Sublime Text](https://github.com/onsi/ginkgo-sublime-completions): just use [Package Control](https://sublime.wbond.net/) to install `Ginkgo Completions`.
|
||||||
|
|
||||||
|
- Straightforward support for third-party testing libraries such as [Gomock](https://code.google.com/p/gomock/) and [Testify](https://github.com/stretchr/testify). Check out the [docs](http://onsi.github.io/ginkgo/#third-party-integrations) for details.
|
||||||
|
|
||||||
|
- A modular architecture that lets you easily:
|
||||||
|
- Write [custom reporters](http://onsi.github.io/ginkgo/#writing-custom-reporters) (for example, Ginkgo comes with a [JUnit XML reporter](http://onsi.github.io/ginkgo/#generating-junit-xml-output) and a TeamCity reporter).
|
||||||
|
- [Adapt an existing matcher library (or write your own!)](http://onsi.github.io/ginkgo/#using-other-matcher-libraries) to work with Ginkgo
|
||||||
|
|
||||||
|
## [Gomega](http://github.com/onsi/gomega): Ginkgo's Preferred Matcher Library
|
||||||
|
|
||||||
|
Ginkgo is best paired with Gomega. Learn more about Gomega [here](http://onsi.github.io/gomega/)
|
||||||
|
|
||||||
|
## [Agouti](http://github.com/sclevine/agouti): A Golang Acceptance Testing Framework
|
||||||
|
|
||||||
|
Agouti allows you to run WebDriver integration tests. Learn more about Agouti [here](http://agouti.org)
|
||||||
|
|
||||||
|
## Set Me Up!
|
||||||
|
|
||||||
|
You'll need Golang v1.3+ (Ubuntu users: you probably have Golang v1.0 -- you'll need to upgrade!)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
|
||||||
|
go get github.com/onsi/ginkgo/ginkgo # installs the ginkgo CLI
|
||||||
|
go get github.com/onsi/gomega # fetches the matcher library
|
||||||
|
|
||||||
|
cd path/to/package/you/want/to/test
|
||||||
|
|
||||||
|
ginkgo bootstrap # set up a new ginkgo suite
|
||||||
|
ginkgo generate # will create a sample test file. edit this file and add your tests then...
|
||||||
|
|
||||||
|
go test # to run your tests
|
||||||
|
|
||||||
|
ginkgo # also runs your tests
|
||||||
|
|
||||||
|
```
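For reference, the suite file that `ginkgo bootstrap` generates looks roughly like this (the `books` package name is only an example; the exact template lives in `ginkgo/bootstrap_command.go`, added later in this commit):

```go
package books_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"

	"testing"
)

func TestBooks(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Books Suite")
}
```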
|
||||||
|
|
||||||
|
## I'm new to Go: What are my testing options?
|
||||||
|
|
||||||
|
Of course, I heartily recommend [Ginkgo](https://github.com/onsi/ginkgo) and [Gomega](https://github.com/onsi/gomega). Both packages are seeing heavy, daily, production use on a number of projects and boast a mature and comprehensive feature-set.
|
||||||
|
|
||||||
|
With that said, it's great to know what your options are :)
|
||||||
|
|
||||||
|
### What Golang gives you out of the box
|
||||||
|
|
||||||
|
Testing is a first-class citizen in Golang; however, Go's built-in testing primitives are somewhat limited: the [testing](http://golang.org/pkg/testing) package provides basic XUnit-style tests and no assertion library.
|
||||||
|
|
||||||
|
### Matcher libraries for Golang's XUnit style tests
|
||||||
|
|
||||||
|
A number of matcher libraries have been written to augment Go's built-in XUnit style tests. Here are two that have gained traction:
|
||||||
|
|
||||||
|
- [testify](https://github.com/stretchr/testify)
|
||||||
|
- [gocheck](http://labix.org/gocheck)
|
||||||
|
|
||||||
|
You can also use Ginkgo's matcher library [Gomega](https://github.com/onsi/gomega) in [XUnit style tests](http://onsi.github.io/gomega/#using-gomega-with-golangs-xunitstyle-tests)
|
||||||
|
|
||||||
|
### BDD style testing frameworks
|
||||||
|
|
||||||
|
There are a handful of BDD-style testing frameworks written for Golang. Here are a few:
|
||||||
|
|
||||||
|
- [Ginkgo](https://github.com/onsi/ginkgo) ;)
|
||||||
|
- [GoConvey](https://github.com/smartystreets/goconvey)
|
||||||
|
- [Goblin](https://github.com/franela/goblin)
|
||||||
|
- [Mao](https://github.com/azer/mao)
|
||||||
|
- [Zen](https://github.com/pranavraja/zen)
|
||||||
|
|
||||||
|
Finally, @shageman has [put together](https://github.com/shageman/gotestit) a comprehensive comparison of golang testing libraries.
|
||||||
|
|
||||||
|
Go explore!
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
Ginkgo is MIT-Licensed
|
vendor/github.com/onsi/ginkgo/config/config.go (new file, 187 lines, generated, vendored)
@@ -0,0 +1,187 @@
|
|||||||
|
/*
|
||||||
|
Ginkgo accepts a number of configuration options.
|
||||||
|
|
||||||
|
These are documented [here](http://onsi.github.io/ginkgo/#the_ginkgo_cli)
|
||||||
|
|
||||||
|
You can also learn more via
|
||||||
|
|
||||||
|
ginkgo help
|
||||||
|
|
||||||
|
or (I kid you not):
|
||||||
|
|
||||||
|
go test -asdf
|
||||||
|
*/
|
||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"flag"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"fmt"
|
||||||
|
)
|
||||||
|
|
||||||
|
const VERSION = "1.2.0"
|
||||||
|
|
||||||
|
type GinkgoConfigType struct {
|
||||||
|
RandomSeed int64
|
||||||
|
RandomizeAllSpecs bool
|
||||||
|
RegexScansFilePath bool
|
||||||
|
FocusString string
|
||||||
|
SkipString string
|
||||||
|
SkipMeasurements bool
|
||||||
|
FailOnPending bool
|
||||||
|
FailFast bool
|
||||||
|
FlakeAttempts int
|
||||||
|
EmitSpecProgress bool
|
||||||
|
DryRun bool
|
||||||
|
|
||||||
|
ParallelNode int
|
||||||
|
ParallelTotal int
|
||||||
|
SyncHost string
|
||||||
|
StreamHost string
|
||||||
|
}
|
||||||
|
|
||||||
|
var GinkgoConfig = GinkgoConfigType{}
|
||||||
|
|
||||||
|
type DefaultReporterConfigType struct {
|
||||||
|
NoColor bool
|
||||||
|
SlowSpecThreshold float64
|
||||||
|
NoisyPendings bool
|
||||||
|
Succinct bool
|
||||||
|
Verbose bool
|
||||||
|
FullTrace bool
|
||||||
|
}
|
||||||
|
|
||||||
|
var DefaultReporterConfig = DefaultReporterConfigType{}
|
||||||
|
|
||||||
|
func processPrefix(prefix string) string {
|
||||||
|
if prefix != "" {
|
||||||
|
prefix = prefix + "."
|
||||||
|
}
|
||||||
|
return prefix
|
||||||
|
}
|
||||||
|
|
||||||
|
func Flags(flagSet *flag.FlagSet, prefix string, includeParallelFlags bool) {
|
||||||
|
prefix = processPrefix(prefix)
|
||||||
|
flagSet.Int64Var(&(GinkgoConfig.RandomSeed), prefix+"seed", time.Now().Unix(), "The seed used to randomize the spec suite.")
|
||||||
|
flagSet.BoolVar(&(GinkgoConfig.RandomizeAllSpecs), prefix+"randomizeAllSpecs", false, "If set, ginkgo will randomize all specs together. By default, ginkgo only randomizes the top level Describe/Context groups.")
|
||||||
|
flagSet.BoolVar(&(GinkgoConfig.SkipMeasurements), prefix+"skipMeasurements", false, "If set, ginkgo will skip any measurement specs.")
|
||||||
|
flagSet.BoolVar(&(GinkgoConfig.FailOnPending), prefix+"failOnPending", false, "If set, ginkgo will mark the test suite as failed if any specs are pending.")
|
||||||
|
flagSet.BoolVar(&(GinkgoConfig.FailFast), prefix+"failFast", false, "If set, ginkgo will stop running a test suite after a failure occurs.")
|
||||||
|
|
||||||
|
flagSet.BoolVar(&(GinkgoConfig.DryRun), prefix+"dryRun", false, "If set, ginkgo will walk the test hierarchy without actually running anything. Best paired with -v.")
|
||||||
|
|
||||||
|
flagSet.StringVar(&(GinkgoConfig.FocusString), prefix+"focus", "", "If set, ginkgo will only run specs that match this regular expression.")
|
||||||
|
flagSet.StringVar(&(GinkgoConfig.SkipString), prefix+"skip", "", "If set, ginkgo will only run specs that do not match this regular expression.")
|
||||||
|
|
||||||
|
flagSet.BoolVar(&(GinkgoConfig.RegexScansFilePath), prefix+"regexScansFilePath", false, "If set, ginkgo regex matching also will look at the file path (code location).")
|
||||||
|
|
||||||
|
flagSet.IntVar(&(GinkgoConfig.FlakeAttempts), prefix+"flakeAttempts", 1, "Make up to this many attempts to run each spec. Please note that if any of the attempts succeed, the suite will not be failed. But any failures will still be recorded.")
|
||||||
|
|
||||||
|
flagSet.BoolVar(&(GinkgoConfig.EmitSpecProgress), prefix+"progress", false, "If set, ginkgo will emit progress information as each spec runs to the GinkgoWriter.")
|
||||||
|
|
||||||
|
if includeParallelFlags {
|
||||||
|
flagSet.IntVar(&(GinkgoConfig.ParallelNode), prefix+"parallel.node", 1, "This worker node's (one-indexed) node number. For running specs in parallel.")
|
||||||
|
flagSet.IntVar(&(GinkgoConfig.ParallelTotal), prefix+"parallel.total", 1, "The total number of worker nodes. For running specs in parallel.")
|
||||||
|
flagSet.StringVar(&(GinkgoConfig.SyncHost), prefix+"parallel.synchost", "", "The address for the server that will synchronize the running nodes.")
|
||||||
|
flagSet.StringVar(&(GinkgoConfig.StreamHost), prefix+"parallel.streamhost", "", "The address for the server that the running nodes should stream data to.")
|
||||||
|
}
|
||||||
|
|
||||||
|
flagSet.BoolVar(&(DefaultReporterConfig.NoColor), prefix+"noColor", false, "If set, suppress color output in default reporter.")
|
||||||
|
flagSet.Float64Var(&(DefaultReporterConfig.SlowSpecThreshold), prefix+"slowSpecThreshold", 5.0, "(in seconds) Specs that take longer to run than this threshold are flagged as slow by the default reporter.")
|
||||||
|
flagSet.BoolVar(&(DefaultReporterConfig.NoisyPendings), prefix+"noisyPendings", true, "If set, default reporter will shout about pending tests.")
|
||||||
|
flagSet.BoolVar(&(DefaultReporterConfig.Verbose), prefix+"v", false, "If set, default reporter prints out all specs as they begin.")
|
||||||
|
flagSet.BoolVar(&(DefaultReporterConfig.Succinct), prefix+"succinct", false, "If set, default reporter prints out a very succinct report")
|
||||||
|
flagSet.BoolVar(&(DefaultReporterConfig.FullTrace), prefix+"trace", false, "If set, default reporter prints out the full stack trace when a failure occurs")
|
||||||
|
}
|
||||||
|
|
||||||
|
func BuildFlagArgs(prefix string, ginkgo GinkgoConfigType, reporter DefaultReporterConfigType) []string {
|
||||||
|
prefix = processPrefix(prefix)
|
||||||
|
result := make([]string, 0)
|
||||||
|
|
||||||
|
if ginkgo.RandomSeed > 0 {
|
||||||
|
result = append(result, fmt.Sprintf("--%sseed=%d", prefix, ginkgo.RandomSeed))
|
||||||
|
}
|
||||||
|
|
||||||
|
if ginkgo.RandomizeAllSpecs {
|
||||||
|
result = append(result, fmt.Sprintf("--%srandomizeAllSpecs", prefix))
|
||||||
|
}
|
||||||
|
|
||||||
|
if ginkgo.SkipMeasurements {
|
||||||
|
result = append(result, fmt.Sprintf("--%sskipMeasurements", prefix))
|
||||||
|
}
|
||||||
|
|
||||||
|
if ginkgo.FailOnPending {
|
||||||
|
result = append(result, fmt.Sprintf("--%sfailOnPending", prefix))
|
||||||
|
}
|
||||||
|
|
||||||
|
if ginkgo.FailFast {
|
||||||
|
result = append(result, fmt.Sprintf("--%sfailFast", prefix))
|
||||||
|
}
|
||||||
|
|
||||||
|
if ginkgo.DryRun {
|
||||||
|
result = append(result, fmt.Sprintf("--%sdryRun", prefix))
|
||||||
|
}
|
||||||
|
|
||||||
|
if ginkgo.FocusString != "" {
|
||||||
|
result = append(result, fmt.Sprintf("--%sfocus=%s", prefix, ginkgo.FocusString))
|
||||||
|
}
|
||||||
|
|
||||||
|
if ginkgo.SkipString != "" {
|
||||||
|
result = append(result, fmt.Sprintf("--%sskip=%s", prefix, ginkgo.SkipString))
|
||||||
|
}
|
||||||
|
|
||||||
|
if ginkgo.FlakeAttempts > 1 {
|
||||||
|
result = append(result, fmt.Sprintf("--%sflakeAttempts=%d", prefix, ginkgo.FlakeAttempts))
|
||||||
|
}
|
||||||
|
|
||||||
|
if ginkgo.EmitSpecProgress {
|
||||||
|
result = append(result, fmt.Sprintf("--%sprogress", prefix))
|
||||||
|
}
|
||||||
|
|
||||||
|
if ginkgo.ParallelNode != 0 {
|
||||||
|
result = append(result, fmt.Sprintf("--%sparallel.node=%d", prefix, ginkgo.ParallelNode))
|
||||||
|
}
|
||||||
|
|
||||||
|
if ginkgo.ParallelTotal != 0 {
|
||||||
|
result = append(result, fmt.Sprintf("--%sparallel.total=%d", prefix, ginkgo.ParallelTotal))
|
||||||
|
}
|
||||||
|
|
||||||
|
if ginkgo.StreamHost != "" {
|
||||||
|
result = append(result, fmt.Sprintf("--%sparallel.streamhost=%s", prefix, ginkgo.StreamHost))
|
||||||
|
}
|
||||||
|
|
||||||
|
if ginkgo.SyncHost != "" {
|
||||||
|
result = append(result, fmt.Sprintf("--%sparallel.synchost=%s", prefix, ginkgo.SyncHost))
|
||||||
|
}
|
||||||
|
|
||||||
|
if ginkgo.RegexScansFilePath {
|
||||||
|
result = append(result, fmt.Sprintf("--%sregexScansFilePath", prefix))
|
||||||
|
}
|
||||||
|
|
||||||
|
if reporter.NoColor {
|
||||||
|
result = append(result, fmt.Sprintf("--%snoColor", prefix))
|
||||||
|
}
|
||||||
|
|
||||||
|
if reporter.SlowSpecThreshold > 0 {
|
||||||
|
result = append(result, fmt.Sprintf("--%sslowSpecThreshold=%.5f", prefix, reporter.SlowSpecThreshold))
|
||||||
|
}
|
||||||
|
|
||||||
|
if !reporter.NoisyPendings {
|
||||||
|
result = append(result, fmt.Sprintf("--%snoisyPendings=false", prefix))
|
||||||
|
}
|
||||||
|
|
||||||
|
if reporter.Verbose {
|
||||||
|
result = append(result, fmt.Sprintf("--%sv", prefix))
|
||||||
|
}
|
||||||
|
|
||||||
|
if reporter.Succinct {
|
||||||
|
result = append(result, fmt.Sprintf("--%ssuccinct", prefix))
|
||||||
|
}
|
||||||
|
|
||||||
|
if reporter.FullTrace {
|
||||||
|
result = append(result, fmt.Sprintf("--%strace", prefix))
|
||||||
|
}
|
||||||
|
|
||||||
|
return result
|
||||||
|
}
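A small sketch (not part of the vendored file) of how the `Flags` and `BuildFlagArgs` helpers above could be wired up; the `ginkgo.` prefix and the use of `flag.CommandLine` are arbitrary choices for illustration:

```go
package main

import (
	"flag"
	"fmt"

	"github.com/onsi/ginkgo/config"
)

func main() {
	// Register the Ginkgo flags (minus the parallel ones) under a "ginkgo." prefix.
	config.Flags(flag.CommandLine, "ginkgo", false)
	flag.Parse()

	// Rebuild the equivalent command-line arguments from the parsed configuration.
	args := config.BuildFlagArgs("ginkgo", config.GinkgoConfig, config.DefaultReporterConfig)
	fmt.Println(args)
}
```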
|
vendor/github.com/onsi/ginkgo/extensions/table/table.go (new file, 98 lines, generated, vendored)
@@ -0,0 +1,98 @@
|
|||||||
|
/*
|
||||||
|
|
||||||
|
Table provides a simple DSL for Ginkgo-native Table-Driven Tests
|
||||||
|
|
||||||
|
The godoc documentation describes Table's API. More comprehensive documentation (with examples!) is available at http://onsi.github.io/ginkgo#table-driven-tests
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
package table
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"reflect"
|
||||||
|
|
||||||
|
"github.com/onsi/ginkgo"
|
||||||
|
)
|
||||||
|
|
||||||
|
/*
|
||||||
|
DescribeTable describes a table-driven test.
|
||||||
|
|
||||||
|
For example:
|
||||||
|
|
||||||
|
DescribeTable("a simple table",
|
||||||
|
func(x int, y int, expected bool) {
|
||||||
|
Ω(x > y).Should(Equal(expected))
|
||||||
|
},
|
||||||
|
Entry("x > y", 1, 0, true),
|
||||||
|
Entry("x == y", 0, 0, false),
|
||||||
|
Entry("x < y", 0, 1, false),
|
||||||
|
)
|
||||||
|
|
||||||
|
The first argument to `DescribeTable` is a string description.
|
||||||
|
The second argument is a function that will be run for each table entry. Your assertions go here - the function is equivalent to a Ginkgo It.
|
||||||
|
The subsequent arguments must be of type `TableEntry`. We recommend using the `Entry` convenience constructors.
|
||||||
|
|
||||||
|
The `Entry` constructor takes a string description followed by an arbitrary set of parameters. These parameters are passed into your function.
|
||||||
|
|
||||||
|
Under the hood, `DescribeTable` simply generates a new Ginkgo `Describe`. Each `Entry` is turned into an `It` within the `Describe`.
|
||||||
|
|
||||||
|
It's important to understand that the `Describe`s and `It`s are generated at evaluation time (i.e. when Ginkgo constructs the tree of tests and before the tests run).
|
||||||
|
|
||||||
|
Individual Entries can be focused (with FEntry) or marked pending (with PEntry or XEntry). In addition, the entire table can be focused or marked pending with FDescribeTable and PDescribeTable/XDescribeTable.
|
||||||
|
*/
|
||||||
|
func DescribeTable(description string, itBody interface{}, entries ...TableEntry) bool {
|
||||||
|
describeTable(description, itBody, entries, false, false)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
You can focus a table with `FDescribeTable`. This is equivalent to `FDescribe`.
|
||||||
|
*/
|
||||||
|
func FDescribeTable(description string, itBody interface{}, entries ...TableEntry) bool {
|
||||||
|
describeTable(description, itBody, entries, false, true)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
You can mark a table as pending with `PDescribeTable`. This is equivalent to `PDescribe`.
|
||||||
|
*/
|
||||||
|
func PDescribeTable(description string, itBody interface{}, entries ...TableEntry) bool {
|
||||||
|
describeTable(description, itBody, entries, true, false)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
You can mark a table as pending with `XDescribeTable`. This is equivalent to `XDescribe`.
|
||||||
|
*/
|
||||||
|
func XDescribeTable(description string, itBody interface{}, entries ...TableEntry) bool {
|
||||||
|
describeTable(description, itBody, entries, true, false)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
func describeTable(description string, itBody interface{}, entries []TableEntry, pending bool, focused bool) {
|
||||||
|
itBodyValue := reflect.ValueOf(itBody)
|
||||||
|
if itBodyValue.Kind() != reflect.Func {
|
||||||
|
panic(fmt.Sprintf("DescribeTable expects a function, got %#v", itBody))
|
||||||
|
}
|
||||||
|
|
||||||
|
if pending {
|
||||||
|
ginkgo.PDescribe(description, func() {
|
||||||
|
for _, entry := range entries {
|
||||||
|
entry.generateIt(itBodyValue)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
} else if focused {
|
||||||
|
ginkgo.FDescribe(description, func() {
|
||||||
|
for _, entry := range entries {
|
||||||
|
entry.generateIt(itBodyValue)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
ginkgo.Describe(description, func() {
|
||||||
|
for _, entry := range entries {
|
||||||
|
entry.generateIt(itBodyValue)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
vendor/github.com/onsi/ginkgo/extensions/table/table_entry.go (new file, 81 lines, generated, vendored)
@@ -0,0 +1,81 @@
|
|||||||
|
package table
|
||||||
|
|
||||||
|
import (
|
||||||
|
"reflect"
|
||||||
|
|
||||||
|
"github.com/onsi/ginkgo"
|
||||||
|
)
|
||||||
|
|
||||||
|
/*
|
||||||
|
TableEntry represents an entry in a table test. You generally use the `Entry` constructor.
|
||||||
|
*/
|
||||||
|
type TableEntry struct {
|
||||||
|
Description string
|
||||||
|
Parameters []interface{}
|
||||||
|
Pending bool
|
||||||
|
Focused bool
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t TableEntry) generateIt(itBody reflect.Value) {
|
||||||
|
if t.Pending {
|
||||||
|
ginkgo.PIt(t.Description)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
values := []reflect.Value{}
|
||||||
|
for i, param := range t.Parameters {
|
||||||
|
var value reflect.Value
|
||||||
|
|
||||||
|
if param == nil {
|
||||||
|
inType := itBody.Type().In(i)
|
||||||
|
value = reflect.Zero(inType)
|
||||||
|
} else {
|
||||||
|
value = reflect.ValueOf(param)
|
||||||
|
}
|
||||||
|
|
||||||
|
values = append(values, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
body := func() {
|
||||||
|
itBody.Call(values)
|
||||||
|
}
|
||||||
|
|
||||||
|
if t.Focused {
|
||||||
|
ginkgo.FIt(t.Description, body)
|
||||||
|
} else {
|
||||||
|
ginkgo.It(t.Description, body)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
Entry constructs a TableEntry.
|
||||||
|
|
||||||
|
The first argument is a required description (this becomes the content of the generated Ginkgo `It`).
|
||||||
|
Subsequent parameters are saved off and sent to the callback passed in to `DescribeTable`.
|
||||||
|
|
||||||
|
Each Entry ends up generating an individual Ginkgo It.
|
||||||
|
*/
|
||||||
|
func Entry(description string, parameters ...interface{}) TableEntry {
|
||||||
|
return TableEntry{description, parameters, false, false}
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
You can focus a particular entry with FEntry. This is equivalent to FIt.
|
||||||
|
*/
|
||||||
|
func FEntry(description string, parameters ...interface{}) TableEntry {
|
||||||
|
return TableEntry{description, parameters, false, true}
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
You can mark a particular entry as pending with PEntry. This is equivalent to PIt.
|
||||||
|
*/
|
||||||
|
func PEntry(description string, parameters ...interface{}) TableEntry {
|
||||||
|
return TableEntry{description, parameters, true, false}
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
You can mark a particular entry as pending with XEntry. This is equivalent to XIt.
|
||||||
|
*/
|
||||||
|
func XEntry(description string, parameters ...interface{}) TableEntry {
|
||||||
|
return TableEntry{description, parameters, true, false}
|
||||||
|
}
|
vendor/github.com/onsi/ginkgo/extensions/table/table_suite_test.go (new file, 13 lines, generated, vendored)
@@ -0,0 +1,13 @@
|
|||||||
|
package table_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
. "github.com/onsi/ginkgo"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestTable(t *testing.T) {
|
||||||
|
RegisterFailHandler(Fail)
|
||||||
|
RunSpecs(t, "Table Suite")
|
||||||
|
}
|
vendor/github.com/onsi/ginkgo/extensions/table/table_test.go (new file, 64 lines, generated, vendored)
@@ -0,0 +1,64 @@
|
|||||||
|
package table_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
. "github.com/onsi/ginkgo/extensions/table"
|
||||||
|
|
||||||
|
. "github.com/onsi/ginkgo"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ = Describe("Table", func() {
|
||||||
|
DescribeTable("a simple table",
|
||||||
|
func(x int, y int, expected bool) {
|
||||||
|
Ω(x > y).Should(Equal(expected))
|
||||||
|
},
|
||||||
|
Entry("x > y", 1, 0, true),
|
||||||
|
Entry("x == y", 0, 0, false),
|
||||||
|
Entry("x < y", 0, 1, false),
|
||||||
|
)
|
||||||
|
|
||||||
|
type ComplicatedThings struct {
|
||||||
|
Superstructure string
|
||||||
|
Substructure string
|
||||||
|
Count int
|
||||||
|
}
|
||||||
|
|
||||||
|
DescribeTable("a more complicated table",
|
||||||
|
func(c ComplicatedThings) {
|
||||||
|
Ω(strings.Count(c.Superstructure, c.Substructure)).Should(BeNumerically("==", c.Count))
|
||||||
|
},
|
||||||
|
Entry("with no matching substructures", ComplicatedThings{
|
||||||
|
Superstructure: "the sixth sheikh's sixth sheep's sick",
|
||||||
|
Substructure: "emir",
|
||||||
|
Count: 0,
|
||||||
|
}),
|
||||||
|
Entry("with one matching substructure", ComplicatedThings{
|
||||||
|
Superstructure: "the sixth sheikh's sixth sheep's sick",
|
||||||
|
Substructure: "sheep",
|
||||||
|
Count: 1,
|
||||||
|
}),
|
||||||
|
Entry("with many matching substructures", ComplicatedThings{
|
||||||
|
Superstructure: "the sixth sheikh's sixth sheep's sick",
|
||||||
|
Substructure: "si",
|
||||||
|
Count: 3,
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
|
||||||
|
PDescribeTable("a failure",
|
||||||
|
func(value bool) {
|
||||||
|
Ω(value).Should(BeFalse())
|
||||||
|
},
|
||||||
|
Entry("when true", true),
|
||||||
|
Entry("when false", false),
|
||||||
|
Entry("when malformed", 2),
|
||||||
|
)
|
||||||
|
|
||||||
|
DescribeTable("an untyped nil as an entry",
|
||||||
|
func(x interface{}) {
|
||||||
|
Expect(x).To(BeNil())
|
||||||
|
},
|
||||||
|
Entry("nil", nil),
|
||||||
|
)
|
||||||
|
})
|
vendor/github.com/onsi/ginkgo/ginkgo/bootstrap_command.go (new file, 202 lines, generated, vendored)
@@ -0,0 +1,202 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
"text/template"
|
||||||
|
|
||||||
|
"go/build"
|
||||||
|
|
||||||
|
"github.com/onsi/ginkgo/ginkgo/nodot"
|
||||||
|
)
|
||||||
|
|
||||||
|
func BuildBootstrapCommand() *Command {
|
||||||
|
var (
|
||||||
|
agouti, noDot, internal bool
|
||||||
|
customBootstrapFile string
|
||||||
|
)
|
||||||
|
flagSet := flag.NewFlagSet("bootstrap", flag.ExitOnError)
|
||||||
|
flagSet.BoolVar(&agouti, "agouti", false, "If set, bootstrap will generate a bootstrap file for writing Agouti tests")
|
||||||
|
flagSet.BoolVar(&noDot, "nodot", false, "If set, bootstrap will generate a bootstrap file that does not . import ginkgo and gomega")
|
||||||
|
flagSet.BoolVar(&internal, "internal", false, "If set, generate will generate a test file that uses the regular package name")
|
||||||
|
flagSet.StringVar(&customBootstrapFile, "template", "", "If specified, generate will use the contents of the file passed as the bootstrap template")
|
||||||
|
|
||||||
|
return &Command{
|
||||||
|
Name: "bootstrap",
|
||||||
|
FlagSet: flagSet,
|
||||||
|
UsageCommand: "ginkgo bootstrap <FLAGS>",
|
||||||
|
Usage: []string{
|
||||||
|
"Bootstrap a test suite for the current package",
|
||||||
|
"Accepts the following flags:",
|
||||||
|
},
|
||||||
|
Command: func(args []string, additionalArgs []string) {
|
||||||
|
generateBootstrap(agouti, noDot, internal, customBootstrapFile)
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var bootstrapText = `package {{.Package}}
|
||||||
|
|
||||||
|
import (
|
||||||
|
{{.GinkgoImport}}
|
||||||
|
{{.GomegaImport}}
|
||||||
|
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func Test{{.FormattedName}}(t *testing.T) {
|
||||||
|
RegisterFailHandler(Fail)
|
||||||
|
RunSpecs(t, "{{.FormattedName}} Suite")
|
||||||
|
}
|
||||||
|
`
|
||||||
|
|
||||||
|
var agoutiBootstrapText = `package {{.Package}}
|
||||||
|
|
||||||
|
import (
|
||||||
|
{{.GinkgoImport}}
|
||||||
|
{{.GomegaImport}}
|
||||||
|
"github.com/sclevine/agouti"
|
||||||
|
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func Test{{.FormattedName}}(t *testing.T) {
|
||||||
|
RegisterFailHandler(Fail)
|
||||||
|
RunSpecs(t, "{{.FormattedName}} Suite")
|
||||||
|
}
|
||||||
|
|
||||||
|
var agoutiDriver *agouti.WebDriver
|
||||||
|
|
||||||
|
var _ = BeforeSuite(func() {
|
||||||
|
// Choose a WebDriver:
|
||||||
|
|
||||||
|
agoutiDriver = agouti.PhantomJS()
|
||||||
|
// agoutiDriver = agouti.Selenium()
|
||||||
|
// agoutiDriver = agouti.ChromeDriver()
|
||||||
|
|
||||||
|
Expect(agoutiDriver.Start()).To(Succeed())
|
||||||
|
})
|
||||||
|
|
||||||
|
var _ = AfterSuite(func() {
|
||||||
|
Expect(agoutiDriver.Stop()).To(Succeed())
|
||||||
|
})
|
||||||
|
`
|
||||||
|
|
||||||
|
type bootstrapData struct {
|
||||||
|
Package string
|
||||||
|
FormattedName string
|
||||||
|
GinkgoImport string
|
||||||
|
GomegaImport string
|
||||||
|
}
|
||||||
|
|
||||||
|
func getPackageAndFormattedName() (string, string, string) {
|
||||||
|
path, err := os.Getwd()
|
||||||
|
if err != nil {
|
||||||
|
complainAndQuit("Could not get current working directory: \n" + err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
dirName := strings.Replace(filepath.Base(path), "-", "_", -1)
|
||||||
|
dirName = strings.Replace(dirName, " ", "_", -1)
|
||||||
|
|
||||||
|
pkg, err := build.ImportDir(path, 0)
|
||||||
|
packageName := pkg.Name
|
||||||
|
if err != nil {
|
||||||
|
packageName = dirName
|
||||||
|
}
|
||||||
|
|
||||||
|
formattedName := prettifyPackageName(filepath.Base(path))
|
||||||
|
return packageName, dirName, formattedName
|
||||||
|
}
|
||||||
|
|
||||||
|
func prettifyPackageName(name string) string {
|
||||||
|
name = strings.Replace(name, "-", " ", -1)
|
||||||
|
name = strings.Replace(name, "_", " ", -1)
|
||||||
|
name = strings.Title(name)
|
||||||
|
name = strings.Replace(name, " ", "", -1)
|
||||||
|
return name
|
||||||
|
}
|
||||||
|
|
||||||
|
func determinePackageName(name string, internal bool) string {
|
||||||
|
if internal {
|
||||||
|
return name
|
||||||
|
}
|
||||||
|
|
||||||
|
return name + "_test"
|
||||||
|
}
|
||||||
|
|
||||||
|
func fileExists(path string) bool {
|
||||||
|
_, err := os.Stat(path)
|
||||||
|
if err == nil {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func generateBootstrap(agouti, noDot, internal bool, customBootstrapFile string) {
|
||||||
|
packageName, bootstrapFilePrefix, formattedName := getPackageAndFormattedName()
|
||||||
|
data := bootstrapData{
|
||||||
|
Package: determinePackageName(packageName, internal),
|
||||||
|
FormattedName: formattedName,
|
||||||
|
GinkgoImport: `. "github.com/onsi/ginkgo"`,
|
||||||
|
GomegaImport: `. "github.com/onsi/gomega"`,
|
||||||
|
}
|
||||||
|
|
||||||
|
if noDot {
|
||||||
|
data.GinkgoImport = `"github.com/onsi/ginkgo"`
|
||||||
|
data.GomegaImport = `"github.com/onsi/gomega"`
|
||||||
|
}
|
||||||
|
|
||||||
|
targetFile := fmt.Sprintf("%s_suite_test.go", bootstrapFilePrefix)
|
||||||
|
if fileExists(targetFile) {
|
||||||
|
fmt.Printf("%s already exists.\n\n", targetFile)
|
||||||
|
os.Exit(1)
|
||||||
|
} else {
|
||||||
|
fmt.Printf("Generating ginkgo test suite bootstrap for %s in:\n\t%s\n", packageName, targetFile)
|
||||||
|
}
|
||||||
|
|
||||||
|
f, err := os.Create(targetFile)
|
||||||
|
if err != nil {
|
||||||
|
complainAndQuit("Could not create file: " + err.Error())
|
||||||
|
panic(err.Error())
|
||||||
|
}
|
||||||
|
defer f.Close()
|
||||||
|
|
||||||
|
var templateText string
|
||||||
|
if customBootstrapFile != "" {
|
||||||
|
tpl, err := ioutil.ReadFile(customBootstrapFile)
|
||||||
|
if err != nil {
|
||||||
|
panic(err.Error())
|
||||||
|
}
|
||||||
|
templateText = string(tpl)
|
||||||
|
} else if agouti {
|
||||||
|
templateText = agoutiBootstrapText
|
||||||
|
} else {
|
||||||
|
templateText = bootstrapText
|
||||||
|
}
|
||||||
|
|
||||||
|
bootstrapTemplate, err := template.New("bootstrap").Parse(templateText)
|
||||||
|
if err != nil {
|
||||||
|
panic(err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
buf := &bytes.Buffer{}
|
||||||
|
bootstrapTemplate.Execute(buf, data)
|
||||||
|
|
||||||
|
if noDot {
|
||||||
|
contents, err := nodot.ApplyNoDot(buf.Bytes())
|
||||||
|
if err != nil {
|
||||||
|
complainAndQuit("Failed to import nodot declarations: " + err.Error())
|
||||||
|
}
|
||||||
|
fmt.Println("To update the nodot declarations in the future, switch to this directory and run:\n\tginkgo nodot")
|
||||||
|
buf = bytes.NewBuffer(contents)
|
||||||
|
}
|
||||||
|
|
||||||
|
buf.WriteTo(f)
|
||||||
|
|
||||||
|
goFmt(targetFile)
|
||||||
|
}
|
vendor/github.com/onsi/ginkgo/ginkgo/build_command.go (new file, 68 lines, generated, vendored)
@@ -0,0 +1,68 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
|
||||||
|
"github.com/onsi/ginkgo/ginkgo/interrupthandler"
|
||||||
|
"github.com/onsi/ginkgo/ginkgo/testrunner"
|
||||||
|
)
|
||||||
|
|
||||||
|
func BuildBuildCommand() *Command {
|
||||||
|
commandFlags := NewBuildCommandFlags(flag.NewFlagSet("build", flag.ExitOnError))
|
||||||
|
interruptHandler := interrupthandler.NewInterruptHandler()
|
||||||
|
builder := &SpecBuilder{
|
||||||
|
commandFlags: commandFlags,
|
||||||
|
interruptHandler: interruptHandler,
|
||||||
|
}
|
||||||
|
|
||||||
|
return &Command{
|
||||||
|
Name: "build",
|
||||||
|
FlagSet: commandFlags.FlagSet,
|
||||||
|
UsageCommand: "ginkgo build <FLAGS> <PACKAGES>",
|
||||||
|
Usage: []string{
|
||||||
|
"Build the passed in <PACKAGES> (or the package in the current directory if left blank).",
|
||||||
|
"Accepts the following flags:",
|
||||||
|
},
|
||||||
|
Command: builder.BuildSpecs,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type SpecBuilder struct {
|
||||||
|
commandFlags *RunWatchAndBuildCommandFlags
|
||||||
|
interruptHandler *interrupthandler.InterruptHandler
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *SpecBuilder) BuildSpecs(args []string, additionalArgs []string) {
|
||||||
|
r.commandFlags.computeNodes()
|
||||||
|
|
||||||
|
suites, _ := findSuites(args, r.commandFlags.Recurse, r.commandFlags.SkipPackage, false)
|
||||||
|
|
||||||
|
if len(suites) == 0 {
|
||||||
|
complainAndQuit("Found no test suites")
|
||||||
|
}
|
||||||
|
|
||||||
|
passed := true
|
||||||
|
for _, suite := range suites {
|
||||||
|
runner := testrunner.New(suite, 1, false, r.commandFlags.GoOpts, nil)
|
||||||
|
fmt.Printf("Compiling %s...\n", suite.PackageName)
|
||||||
|
|
||||||
|
path, _ := filepath.Abs(filepath.Join(suite.Path, fmt.Sprintf("%s.test", suite.PackageName)))
|
||||||
|
err := runner.CompileTo(path)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Println(err.Error())
|
||||||
|
passed = false
|
||||||
|
} else {
|
||||||
|
fmt.Printf(" compiled %s.test\n", suite.PackageName)
|
||||||
|
}
|
||||||
|
|
||||||
|
runner.CleanUp()
|
||||||
|
}
|
||||||
|
|
||||||
|
if passed {
|
||||||
|
os.Exit(0)
|
||||||
|
}
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
vendor/github.com/onsi/ginkgo/ginkgo/convert/ginkgo_ast_nodes.go (new file, 123 lines, generated, vendored)
@@ -0,0 +1,123 @@
|
|||||||
|
package convert
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"strings"
|
||||||
|
"unicode"
|
||||||
|
)
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Creates a `var _ = ...` value spec node
|
||||||
|
*/
|
||||||
|
func createVarUnderscoreBlock() *ast.ValueSpec {
|
||||||
|
valueSpec := &ast.ValueSpec{}
|
||||||
|
object := &ast.Object{Kind: 4, Name: "_", Decl: valueSpec, Data: 0}
|
||||||
|
ident := &ast.Ident{Name: "_", Obj: object}
|
||||||
|
valueSpec.Names = append(valueSpec.Names, ident)
|
||||||
|
return valueSpec
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Creates a Describe("Testing with ginkgo", func() { }) node
|
||||||
|
*/
|
||||||
|
func createDescribeBlock() *ast.CallExpr {
|
||||||
|
blockStatement := &ast.BlockStmt{List: []ast.Stmt{}}
|
||||||
|
|
||||||
|
fieldList := &ast.FieldList{}
|
||||||
|
funcType := &ast.FuncType{Params: fieldList}
|
||||||
|
funcLit := &ast.FuncLit{Type: funcType, Body: blockStatement}
|
||||||
|
basicLit := &ast.BasicLit{Kind: 9, Value: "\"Testing with Ginkgo\""}
|
||||||
|
describeIdent := &ast.Ident{Name: "Describe"}
|
||||||
|
return &ast.CallExpr{Fun: describeIdent, Args: []ast.Expr{basicLit, funcLit}}
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Convenience function to return the name of the *testing.T param
|
||||||
|
* for a Test function that will be rewritten. This is useful because
|
||||||
|
* we will want to replace the usage of this named *testing.T inside the
|
||||||
|
* body of the function with a GinkgoT.
|
||||||
|
*/
|
||||||
|
func namedTestingTArg(node *ast.FuncDecl) string {
|
||||||
|
return node.Type.Params.List[0].Names[0].Name // *exhale*
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Convenience function to return the block statement node for a Describe statement
|
||||||
|
*/
|
||||||
|
func blockStatementFromDescribe(desc *ast.CallExpr) *ast.BlockStmt {
|
||||||
|
var funcLit *ast.FuncLit
|
||||||
|
var found = false
|
||||||
|
|
||||||
|
for _, node := range desc.Args {
|
||||||
|
switch node := node.(type) {
|
||||||
|
case *ast.FuncLit:
|
||||||
|
found = true
|
||||||
|
funcLit = node
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if !found {
|
||||||
|
panic("Error finding ast.FuncLit inside describe statement. Somebody done goofed.")
|
||||||
|
}
|
||||||
|
|
||||||
|
return funcLit.Body
|
||||||
|
}
|
||||||
|
|
||||||
|
/* convenience function for creating an It("TestNameHere")
|
||||||
|
* with all the body of the test function inside the anonymous
|
||||||
|
* func passed to It()
|
||||||
|
*/
|
||||||
|
func createItStatementForTestFunc(testFunc *ast.FuncDecl) *ast.ExprStmt {
|
||||||
|
blockStatement := &ast.BlockStmt{List: testFunc.Body.List}
|
||||||
|
fieldList := &ast.FieldList{}
|
||||||
|
funcType := &ast.FuncType{Params: fieldList}
|
||||||
|
funcLit := &ast.FuncLit{Type: funcType, Body: blockStatement}
|
||||||
|
|
||||||
|
testName := rewriteTestName(testFunc.Name.Name)
|
||||||
|
basicLit := &ast.BasicLit{Kind: 9, Value: fmt.Sprintf("\"%s\"", testName)}
|
||||||
|
itBlockIdent := &ast.Ident{Name: "It"}
|
||||||
|
callExpr := &ast.CallExpr{Fun: itBlockIdent, Args: []ast.Expr{basicLit, funcLit}}
|
||||||
|
return &ast.ExprStmt{X: callExpr}
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* rewrite test names to be human readable
|
||||||
|
* eg: rewrites "TestSomethingAmazing" as "something amazing"
|
||||||
|
*/
|
||||||
|
func rewriteTestName(testName string) string {
|
||||||
|
nameComponents := []string{}
|
||||||
|
currentString := ""
|
||||||
|
indexOfTest := strings.Index(testName, "Test")
|
||||||
|
if indexOfTest != 0 {
|
||||||
|
return testName
|
||||||
|
}
|
||||||
|
|
||||||
|
testName = strings.Replace(testName, "Test", "", 1)
|
||||||
|
first, rest := testName[0], testName[1:]
|
||||||
|
testName = string(unicode.ToLower(rune(first))) + rest
|
||||||
|
|
||||||
|
for _, rune := range testName {
|
||||||
|
if unicode.IsUpper(rune) {
|
||||||
|
nameComponents = append(nameComponents, currentString)
|
||||||
|
currentString = string(unicode.ToLower(rune))
|
||||||
|
} else {
|
||||||
|
currentString += string(rune)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return strings.Join(append(nameComponents, currentString), " ")
|
||||||
|
}
|
||||||
|
|
||||||
|
func newGinkgoTFromIdent(ident *ast.Ident) *ast.CallExpr {
|
||||||
|
return &ast.CallExpr{
|
||||||
|
Lparen: ident.NamePos + 1,
|
||||||
|
Rparen: ident.NamePos + 2,
|
||||||
|
Fun: &ast.Ident{Name: "GinkgoT"},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func newGinkgoTInterface() *ast.Ident {
|
||||||
|
return &ast.Ident{Name: "GinkgoTInterface"}
|
||||||
|
}
|
vendor/github.com/onsi/ginkgo/ginkgo/convert/import.go (new file, 91 lines, generated, vendored)
@@ -0,0 +1,91 @@
|
|||||||
|
package convert
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
)
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Given the root node of an AST, returns the node containing the
|
||||||
|
* import statements for the file.
|
||||||
|
*/
|
||||||
|
func importsForRootNode(rootNode *ast.File) (imports *ast.GenDecl, err error) {
|
||||||
|
for _, declaration := range rootNode.Decls {
|
||||||
|
decl, ok := declaration.(*ast.GenDecl)
|
||||||
|
if !ok || len(decl.Specs) == 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
_, ok = decl.Specs[0].(*ast.ImportSpec)
|
||||||
|
if ok {
|
||||||
|
imports = decl
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
err = errors.New(fmt.Sprintf("Could not find imports for root node:\n\t%#v\n", rootNode))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Removes "testing" import, if present
|
||||||
|
*/
|
||||||
|
func removeTestingImport(rootNode *ast.File) {
|
||||||
|
importDecl, err := importsForRootNode(rootNode)
|
||||||
|
if err != nil {
|
||||||
|
panic(err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
var index int
|
||||||
|
for i, importSpec := range importDecl.Specs {
|
||||||
|
importSpec := importSpec.(*ast.ImportSpec)
|
||||||
|
if importSpec.Path.Value == "\"testing\"" {
|
||||||
|
index = i
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
importDecl.Specs = append(importDecl.Specs[:index], importDecl.Specs[index+1:]...)
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Adds import statements for onsi/ginkgo, if missing
|
||||||
|
*/
|
||||||
|
func addGinkgoImports(rootNode *ast.File) {
|
||||||
|
importDecl, err := importsForRootNode(rootNode)
|
||||||
|
if err != nil {
|
||||||
|
panic(err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(importDecl.Specs) == 0 {
|
||||||
|
// TODO: might need to create a import decl here
|
||||||
|
panic("unimplemented : expected to find an imports block")
|
||||||
|
}
|
||||||
|
|
||||||
|
needsGinkgo := true
|
||||||
|
for _, importSpec := range importDecl.Specs {
|
||||||
|
importSpec, ok := importSpec.(*ast.ImportSpec)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if importSpec.Path.Value == "\"github.com/onsi/ginkgo\"" {
|
||||||
|
needsGinkgo = false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if needsGinkgo {
|
||||||
|
importDecl.Specs = append(importDecl.Specs, createImport(".", "\"github.com/onsi/ginkgo\""))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* convenience function to create an import statement
|
||||||
|
*/
|
||||||
|
func createImport(name, path string) *ast.ImportSpec {
|
||||||
|
return &ast.ImportSpec{
|
||||||
|
Name: &ast.Ident{Name: name},
|
||||||
|
Path: &ast.BasicLit{Kind: 9, Value: path},
|
||||||
|
}
|
||||||
|
}
|
vendor/github.com/onsi/ginkgo/ginkgo/convert/package_rewriter.go (new file, 127 lines, generated, vendored)
@@ -0,0 +1,127 @@
package convert

import (
	"fmt"
	"go/build"
	"io/ioutil"
	"os"
	"os/exec"
	"path/filepath"
	"regexp"
)

/*
 * RewritePackage takes a name (eg: my-package/tools), finds its test files using
 * Go's build package, and then rewrites them. A ginkgo test suite file will
 * also be added for this package, and all of its child packages.
 */
func RewritePackage(packageName string) {
	pkg, err := packageWithName(packageName)
	if err != nil {
		panic(fmt.Sprintf("unexpected error reading package: '%s'\n%s\n", packageName, err.Error()))
	}

	for _, filename := range findTestsInPackage(pkg) {
		rewriteTestsInFile(filename)
	}
	return
}

/*
 * Given a package, findTestsInPackage reads the test files in the directory,
 * and then recurses on each child package, returning a slice of all test files
 * found in this process.
 */
func findTestsInPackage(pkg *build.Package) (testfiles []string) {
	for _, file := range append(pkg.TestGoFiles, pkg.XTestGoFiles...) {
		testfiles = append(testfiles, filepath.Join(pkg.Dir, file))
	}

	dirFiles, err := ioutil.ReadDir(pkg.Dir)
	if err != nil {
		panic(fmt.Sprintf("unexpected error reading dir: '%s'\n%s\n", pkg.Dir, err.Error()))
	}

	re := regexp.MustCompile(`^[._]`)

	for _, file := range dirFiles {
		if !file.IsDir() {
			continue
		}

		if re.Match([]byte(file.Name())) {
			continue
		}

		packageName := filepath.Join(pkg.ImportPath, file.Name())
		subPackage, err := packageWithName(packageName)
		if err != nil {
			panic(fmt.Sprintf("unexpected error reading package: '%s'\n%s\n", packageName, err.Error()))
		}

		testfiles = append(testfiles, findTestsInPackage(subPackage)...)
	}

	addGinkgoSuiteForPackage(pkg)
	goFmtPackage(pkg)
	return
}

/*
 * Shells out to `ginkgo bootstrap` to create a test suite file
 */
func addGinkgoSuiteForPackage(pkg *build.Package) {
	originalDir, err := os.Getwd()
	if err != nil {
		panic(err)
	}

	suite_test_file := filepath.Join(pkg.Dir, pkg.Name+"_suite_test.go")

	_, err = os.Stat(suite_test_file)
	if err == nil {
		return // test file already exists, this should be a no-op
	}

	err = os.Chdir(pkg.Dir)
	if err != nil {
		panic(err)
	}

	output, err := exec.Command("ginkgo", "bootstrap").Output()

	if err != nil {
		panic(fmt.Sprintf("error running 'ginkgo bootstrap'.\nstdout: %s\n%s\n", output, err.Error()))
	}

	err = os.Chdir(originalDir)
	if err != nil {
		panic(err)
	}
}

/*
 * Shells out to `go fmt` to format the package
 */
func goFmtPackage(pkg *build.Package) {
	output, err := exec.Command("go", "fmt", pkg.ImportPath).Output()

	if err != nil {
		fmt.Printf("Warning: Error running 'go fmt %s'.\nstdout: %s\n%s\n", pkg.ImportPath, output, err.Error())
	}
}

/*
 * Attempts to return a package with its test files already read.
 * The ImportMode arg to build.Import lets you specify if you want go to read the
 * buildable go files inside the package, but it fails if the package has no go files
 */
func packageWithName(name string) (pkg *build.Package, err error) {
	pkg, err = build.Default.Import(name, ".", build.ImportMode(0))
	if err == nil {
		return
	}

	pkg, err = build.Default.Import(name, ".", build.ImportMode(1))
	return
}
56
vendor/github.com/onsi/ginkgo/ginkgo/convert/test_finder.go
generated
vendored
Normal file
@ -0,0 +1,56 @@
package convert

import (
	"go/ast"
	"regexp"
)

/*
 * Given a root node, walks its top level statements and returns
 * points to function nodes to rewrite as It statements.
 * These functions, according to Go testing convention, must be named
 * TestWithCamelCasedName and receive a single *testing.T argument.
 */
func findTestFuncs(rootNode *ast.File) (testsToRewrite []*ast.FuncDecl) {
	testNameRegexp := regexp.MustCompile("^Test[0-9A-Z].+")

	ast.Inspect(rootNode, func(node ast.Node) bool {
		if node == nil {
			return false
		}

		switch node := node.(type) {
		case *ast.FuncDecl:
			matches := testNameRegexp.MatchString(node.Name.Name)

			if matches && receivesTestingT(node) {
				testsToRewrite = append(testsToRewrite, node)
			}
		}

		return true
	})

	return
}

/*
 * convenience function that looks at args to a function and determines if its
 * params include an argument of type *testing.T
 */
func receivesTestingT(node *ast.FuncDecl) bool {
	if len(node.Type.Params.List) != 1 {
		return false
	}

	base, ok := node.Type.Params.List[0].Type.(*ast.StarExpr)
	if !ok {
		return false
	}

	intermediate := base.X.(*ast.SelectorExpr)
	isTestingPackage := intermediate.X.(*ast.Ident).Name == "testing"
	isTestingT := intermediate.Sel.Name == "T"

	return isTestingPackage && isTestingT
}
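As a quick illustration of the naming rule `findTestFuncs` applies (the same `^Test[0-9A-Z].+` expression used above), here is a standalone sketch with made-up function names; a declaration is only rewritten when its name matches and, per `receivesTestingT`, it takes exactly one `*testing.T` parameter.

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern the converter compiles in findTestFuncs.
	testNameRegexp := regexp.MustCompile("^Test[0-9A-Z].+")

	for _, name := range []string{
		"TestSomethingImportant", // matched: would be rewritten as an It(...)
		"Test1Thing",             // matched: a digit after "Test" is allowed
		"Testing",                // not matched: lowercase letter follows "Test"
		"helperForTests",         // not matched: does not start with "Test"
	} {
		fmt.Printf("%-22s -> %v\n", name, testNameRegexp.MatchString(name))
	}
}
```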
163
vendor/github.com/onsi/ginkgo/ginkgo/convert/testfile_rewriter.go
generated
vendored
Normal file
@ -0,0 +1,163 @@
|
|||||||
|
package convert
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/format"
|
||||||
|
"go/parser"
|
||||||
|
"go/token"
|
||||||
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
|
)
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Given a file path, rewrites any tests in the Ginkgo format.
|
||||||
|
* First, we parse the AST, and update the imports declaration.
|
||||||
|
* Then, we walk the first child elements in the file, returning tests to rewrite.
|
||||||
|
* A top level init func is declared, with a single Describe func inside.
|
||||||
|
* Then the test functions to rewrite are inserted as It statements inside the Describe.
|
||||||
|
* Finally we walk the rest of the file, replacing other usages of *testing.T
|
||||||
|
* Once that is complete, we write the AST back out again to its file.
|
||||||
|
*/
|
||||||
|
func rewriteTestsInFile(pathToFile string) {
|
||||||
|
fileSet := token.NewFileSet()
|
||||||
|
rootNode, err := parser.ParseFile(fileSet, pathToFile, nil, 0)
|
||||||
|
if err != nil {
|
||||||
|
panic(fmt.Sprintf("Error parsing test file '%s':\n%s\n", pathToFile, err.Error()))
|
||||||
|
}
|
||||||
|
|
||||||
|
addGinkgoImports(rootNode)
|
||||||
|
removeTestingImport(rootNode)
|
||||||
|
|
||||||
|
varUnderscoreBlock := createVarUnderscoreBlock()
|
||||||
|
describeBlock := createDescribeBlock()
|
||||||
|
varUnderscoreBlock.Values = []ast.Expr{describeBlock}
|
||||||
|
|
||||||
|
for _, testFunc := range findTestFuncs(rootNode) {
|
||||||
|
rewriteTestFuncAsItStatement(testFunc, rootNode, describeBlock)
|
||||||
|
}
|
||||||
|
|
||||||
|
underscoreDecl := &ast.GenDecl{
|
||||||
|
Tok: 85, // gah, magick numbers are needed to make this work
|
||||||
|
TokPos: 14, // this tricks Go into writing "var _ = Describe"
|
||||||
|
Specs: []ast.Spec{varUnderscoreBlock},
|
||||||
|
}
|
||||||
|
|
||||||
|
imports := rootNode.Decls[0]
|
||||||
|
tail := rootNode.Decls[1:]
|
||||||
|
rootNode.Decls = append(append([]ast.Decl{imports}, underscoreDecl), tail...)
|
||||||
|
rewriteOtherFuncsToUseGinkgoT(rootNode.Decls)
|
||||||
|
walkNodesInRootNodeReplacingTestingT(rootNode)
|
||||||
|
|
||||||
|
var buffer bytes.Buffer
|
||||||
|
if err = format.Node(&buffer, fileSet, rootNode); err != nil {
|
||||||
|
panic(fmt.Sprintf("Error formatting ast node after rewriting tests.\n%s\n", err.Error()))
|
||||||
|
}
|
||||||
|
|
||||||
|
fileInfo, err := os.Stat(pathToFile)
|
||||||
|
if err != nil {
|
||||||
|
panic(fmt.Sprintf("Error stat'ing file: %s\n", pathToFile))
|
||||||
|
}
|
||||||
|
|
||||||
|
ioutil.WriteFile(pathToFile, buffer.Bytes(), fileInfo.Mode())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Given a test func named TestDoesSomethingNeat, rewrites it as
|
||||||
|
* It("does something neat", func() { __test_body_here__ }) and adds it
|
||||||
|
* to the Describe's list of statements
|
||||||
|
*/
|
||||||
|
func rewriteTestFuncAsItStatement(testFunc *ast.FuncDecl, rootNode *ast.File, describe *ast.CallExpr) {
|
||||||
|
var funcIndex int = -1
|
||||||
|
for index, child := range rootNode.Decls {
|
||||||
|
if child == testFunc {
|
||||||
|
funcIndex = index
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if funcIndex < 0 {
|
||||||
|
panic(fmt.Sprintf("Assert failed: Error finding index for test node %s\n", testFunc.Name.Name))
|
||||||
|
}
|
||||||
|
|
||||||
|
var block *ast.BlockStmt = blockStatementFromDescribe(describe)
|
||||||
|
block.List = append(block.List, createItStatementForTestFunc(testFunc))
|
||||||
|
replaceTestingTsWithGinkgoT(block, namedTestingTArg(testFunc))
|
||||||
|
|
||||||
|
// remove the old test func from the root node's declarations
|
||||||
|
rootNode.Decls = append(rootNode.Decls[:funcIndex], rootNode.Decls[funcIndex+1:]...)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* walks nodes inside of a test func's statements and replaces the usage of
|
||||||
|
* it's named *testing.T param with GinkgoT's
|
||||||
|
*/
|
||||||
|
func replaceTestingTsWithGinkgoT(statementsBlock *ast.BlockStmt, testingT string) {
|
||||||
|
ast.Inspect(statementsBlock, func(node ast.Node) bool {
|
||||||
|
if node == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
keyValueExpr, ok := node.(*ast.KeyValueExpr)
|
||||||
|
if ok {
|
||||||
|
replaceNamedTestingTsInKeyValueExpression(keyValueExpr, testingT)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
funcLiteral, ok := node.(*ast.FuncLit)
|
||||||
|
if ok {
|
||||||
|
replaceTypeDeclTestingTsInFuncLiteral(funcLiteral)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
callExpr, ok := node.(*ast.CallExpr)
|
||||||
|
if !ok {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
replaceTestingTsInArgsLists(callExpr, testingT)
|
||||||
|
|
||||||
|
funCall, ok := callExpr.Fun.(*ast.SelectorExpr)
|
||||||
|
if ok {
|
||||||
|
replaceTestingTsMethodCalls(funCall, testingT)
|
||||||
|
}
|
||||||
|
|
||||||
|
return true
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* rewrite t.Fail() or any other *testing.T method by replacing with T().Fail()
|
||||||
|
* This function receives a selector expression (eg: t.Fail()) and
|
||||||
|
* the name of the *testing.T param from the function declaration. Rewrites the
|
||||||
|
* selector expression in place if the target was a *testing.T
|
||||||
|
*/
|
||||||
|
func replaceTestingTsMethodCalls(selectorExpr *ast.SelectorExpr, testingT string) {
|
||||||
|
ident, ok := selectorExpr.X.(*ast.Ident)
|
||||||
|
if !ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if ident.Name == testingT {
|
||||||
|
selectorExpr.X = newGinkgoTFromIdent(ident)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* replaces usages of a named *testing.T param inside of a call expression
|
||||||
|
* with a new GinkgoT object
|
||||||
|
*/
|
||||||
|
func replaceTestingTsInArgsLists(callExpr *ast.CallExpr, testingT string) {
|
||||||
|
for index, arg := range callExpr.Args {
|
||||||
|
ident, ok := arg.(*ast.Ident)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if ident.Name == testingT {
|
||||||
|
callExpr.Args[index] = newGinkgoTFromIdent(ident)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
130
vendor/github.com/onsi/ginkgo/ginkgo/convert/testing_t_rewriter.go
generated
vendored
Normal file
@ -0,0 +1,130 @@
|
|||||||
|
package convert
|
||||||
|
|
||||||
|
import (
|
||||||
|
"go/ast"
|
||||||
|
)
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Rewrites any other top level funcs that receive a *testing.T param
|
||||||
|
*/
|
||||||
|
func rewriteOtherFuncsToUseGinkgoT(declarations []ast.Decl) {
|
||||||
|
for _, decl := range declarations {
|
||||||
|
decl, ok := decl.(*ast.FuncDecl)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, param := range decl.Type.Params.List {
|
||||||
|
starExpr, ok := param.Type.(*ast.StarExpr)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
selectorExpr, ok := starExpr.X.(*ast.SelectorExpr)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
xIdent, ok := selectorExpr.X.(*ast.Ident)
|
||||||
|
if !ok || xIdent.Name != "testing" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if selectorExpr.Sel.Name != "T" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
param.Type = newGinkgoTInterface()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Walks all of the nodes in the file, replacing *testing.T in struct
|
||||||
|
* and func literal nodes. eg:
|
||||||
|
* type foo struct { *testing.T }
|
||||||
|
* var bar = func(t *testing.T) { }
|
||||||
|
*/
|
||||||
|
func walkNodesInRootNodeReplacingTestingT(rootNode *ast.File) {
|
||||||
|
ast.Inspect(rootNode, func(node ast.Node) bool {
|
||||||
|
if node == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
switch node := node.(type) {
|
||||||
|
case *ast.StructType:
|
||||||
|
replaceTestingTsInStructType(node)
|
||||||
|
case *ast.FuncLit:
|
||||||
|
replaceTypeDeclTestingTsInFuncLiteral(node)
|
||||||
|
}
|
||||||
|
|
||||||
|
return true
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* replaces named *testing.T inside a composite literal
|
||||||
|
*/
|
||||||
|
func replaceNamedTestingTsInKeyValueExpression(kve *ast.KeyValueExpr, testingT string) {
|
||||||
|
ident, ok := kve.Value.(*ast.Ident)
|
||||||
|
if !ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if ident.Name == testingT {
|
||||||
|
kve.Value = newGinkgoTFromIdent(ident)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* replaces *testing.T params in a func literal with GinkgoT
|
||||||
|
*/
|
||||||
|
func replaceTypeDeclTestingTsInFuncLiteral(functionLiteral *ast.FuncLit) {
|
||||||
|
for _, arg := range functionLiteral.Type.Params.List {
|
||||||
|
starExpr, ok := arg.Type.(*ast.StarExpr)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
selectorExpr, ok := starExpr.X.(*ast.SelectorExpr)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
target, ok := selectorExpr.X.(*ast.Ident)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if target.Name == "testing" && selectorExpr.Sel.Name == "T" {
|
||||||
|
arg.Type = newGinkgoTInterface()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Replaces *testing.T types inside of a struct declaration with a GinkgoT
|
||||||
|
* eg: type foo struct { *testing.T }
|
||||||
|
*/
|
||||||
|
func replaceTestingTsInStructType(structType *ast.StructType) {
|
||||||
|
for _, field := range structType.Fields.List {
|
||||||
|
starExpr, ok := field.Type.(*ast.StarExpr)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
selectorExpr, ok := starExpr.X.(*ast.SelectorExpr)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
xIdent, ok := selectorExpr.X.(*ast.Ident)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if xIdent.Name == "testing" && selectorExpr.Sel.Name == "T" {
|
||||||
|
field.Type = newGinkgoTInterface()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
44
vendor/github.com/onsi/ginkgo/ginkgo/convert_command.go
generated
vendored
Normal file
@ -0,0 +1,44 @@
package main

import (
	"flag"
	"fmt"
	"github.com/onsi/ginkgo/ginkgo/convert"
	"os"
)

func BuildConvertCommand() *Command {
	return &Command{
		Name:         "convert",
		FlagSet:      flag.NewFlagSet("convert", flag.ExitOnError),
		UsageCommand: "ginkgo convert /path/to/package",
		Usage: []string{
			"Convert the package at the passed in path from an XUnit-style test to a Ginkgo-style test",
		},
		Command: convertPackage,
	}
}

func convertPackage(args []string, additionalArgs []string) {
	if len(args) != 1 {
		println(fmt.Sprintf("usage: ginkgo convert /path/to/your/package"))
		os.Exit(1)
	}

	defer func() {
		err := recover()
		if err != nil {
			switch err := err.(type) {
			case error:
				println(err.Error())
			case string:
				println(err)
			default:
				println(fmt.Sprintf("unexpected error: %#v", err))
			}
			os.Exit(1)
		}
	}()

	convert.RewritePackage(args[0])
}
167
vendor/github.com/onsi/ginkgo/ginkgo/generate_command.go
generated
vendored
Normal file
@ -0,0 +1,167 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
"text/template"
|
||||||
|
)
|
||||||
|
|
||||||
|
func BuildGenerateCommand() *Command {
|
||||||
|
var agouti, noDot, internal bool
|
||||||
|
flagSet := flag.NewFlagSet("generate", flag.ExitOnError)
|
||||||
|
flagSet.BoolVar(&agouti, "agouti", false, "If set, generate will generate a test file for writing Agouti tests")
|
||||||
|
flagSet.BoolVar(&noDot, "nodot", false, "If set, generate will generate a test file that does not . import ginkgo and gomega")
|
||||||
|
flagSet.BoolVar(&internal, "internal", false, "If set, generate will generate a test file that uses the regular package name")
|
||||||
|
|
||||||
|
return &Command{
|
||||||
|
Name: "generate",
|
||||||
|
FlagSet: flagSet,
|
||||||
|
UsageCommand: "ginkgo generate <filename(s)>",
|
||||||
|
Usage: []string{
|
||||||
|
"Generate a test file named filename_test.go",
|
||||||
|
"If the optional <filenames> argument is omitted, a file named after the package in the current directory will be created.",
|
||||||
|
"Accepts the following flags:",
|
||||||
|
},
|
||||||
|
Command: func(args []string, additionalArgs []string) {
|
||||||
|
generateSpec(args, agouti, noDot, internal)
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var specText = `package {{.Package}}
|
||||||
|
|
||||||
|
import (
|
||||||
|
{{if .DotImportPackage}}. "{{.PackageImportPath}}"{{end}}
|
||||||
|
|
||||||
|
{{if .IncludeImports}}. "github.com/onsi/ginkgo"{{end}}
|
||||||
|
{{if .IncludeImports}}. "github.com/onsi/gomega"{{end}}
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ = Describe("{{.Subject}}", func() {
|
||||||
|
|
||||||
|
})
|
||||||
|
`
|
||||||
|
|
||||||
|
var agoutiSpecText = `package {{.Package}}_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
{{if .DotImportPackage}}. "{{.PackageImportPath}}"{{end}}
|
||||||
|
|
||||||
|
{{if .IncludeImports}}. "github.com/onsi/ginkgo"{{end}}
|
||||||
|
{{if .IncludeImports}}. "github.com/onsi/gomega"{{end}}
|
||||||
|
. "github.com/sclevine/agouti/matchers"
|
||||||
|
"github.com/sclevine/agouti"
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ = Describe("{{.Subject}}", func() {
|
||||||
|
var page *agouti.Page
|
||||||
|
|
||||||
|
BeforeEach(func() {
|
||||||
|
var err error
|
||||||
|
page, err = agoutiDriver.NewPage()
|
||||||
|
Expect(err).NotTo(HaveOccurred())
|
||||||
|
})
|
||||||
|
|
||||||
|
AfterEach(func() {
|
||||||
|
Expect(page.Destroy()).To(Succeed())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
`
|
||||||
|
|
||||||
|
type specData struct {
|
||||||
|
Package string
|
||||||
|
Subject string
|
||||||
|
PackageImportPath string
|
||||||
|
IncludeImports bool
|
||||||
|
DotImportPackage bool
|
||||||
|
}
|
||||||
|
|
||||||
|
func generateSpec(args []string, agouti, noDot, internal bool) {
|
||||||
|
if len(args) == 0 {
|
||||||
|
err := generateSpecForSubject("", agouti, noDot, internal)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Println(err.Error())
|
||||||
|
fmt.Println("")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
fmt.Println("")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var failed bool
|
||||||
|
for _, arg := range args {
|
||||||
|
err := generateSpecForSubject(arg, agouti, noDot, internal)
|
||||||
|
if err != nil {
|
||||||
|
failed = true
|
||||||
|
fmt.Println(err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fmt.Println("")
|
||||||
|
if failed {
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func generateSpecForSubject(subject string, agouti, noDot, internal bool) error {
|
||||||
|
packageName, specFilePrefix, formattedName := getPackageAndFormattedName()
|
||||||
|
if subject != "" {
|
||||||
|
subject = strings.Split(subject, ".go")[0]
|
||||||
|
subject = strings.Split(subject, "_test")[0]
|
||||||
|
specFilePrefix = subject
|
||||||
|
formattedName = prettifyPackageName(subject)
|
||||||
|
}
|
||||||
|
|
||||||
|
data := specData{
|
||||||
|
Package: determinePackageName(packageName, internal),
|
||||||
|
Subject: formattedName,
|
||||||
|
PackageImportPath: getPackageImportPath(),
|
||||||
|
IncludeImports: !noDot,
|
||||||
|
DotImportPackage: !internal,
|
||||||
|
}
|
||||||
|
|
||||||
|
targetFile := fmt.Sprintf("%s_test.go", specFilePrefix)
|
||||||
|
if fileExists(targetFile) {
|
||||||
|
return fmt.Errorf("%s already exists.", targetFile)
|
||||||
|
} else {
|
||||||
|
fmt.Printf("Generating ginkgo test for %s in:\n %s\n", data.Subject, targetFile)
|
||||||
|
}
|
||||||
|
|
||||||
|
f, err := os.Create(targetFile)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer f.Close()
|
||||||
|
|
||||||
|
var templateText string
|
||||||
|
if agouti {
|
||||||
|
templateText = agoutiSpecText
|
||||||
|
} else {
|
||||||
|
templateText = specText
|
||||||
|
}
|
||||||
|
|
||||||
|
specTemplate, err := template.New("spec").Parse(templateText)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
specTemplate.Execute(f, data)
|
||||||
|
goFmt(targetFile)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func getPackageImportPath() string {
|
||||||
|
workingDir, err := os.Getwd()
|
||||||
|
if err != nil {
|
||||||
|
panic(err.Error())
|
||||||
|
}
|
||||||
|
sep := string(filepath.Separator)
|
||||||
|
paths := strings.Split(workingDir, sep+"src"+sep)
|
||||||
|
if len(paths) == 1 {
|
||||||
|
fmt.Printf("\nCouldn't identify package import path.\n\n\tginkgo generate\n\nMust be run within a package directory under $GOPATH/src/...\nYou're going to have to change UNKNOWN_PACKAGE_PATH in the generated file...\n\n")
|
||||||
|
return "UNKNOWN_PACKAGE_PATH"
|
||||||
|
}
|
||||||
|
return filepath.ToSlash(paths[len(paths)-1])
|
||||||
|
}
|
31
vendor/github.com/onsi/ginkgo/ginkgo/help_command.go
generated
vendored
Normal file
@ -0,0 +1,31 @@
package main

import (
	"flag"
	"fmt"
)

func BuildHelpCommand() *Command {
	return &Command{
		Name:         "help",
		FlagSet:      flag.NewFlagSet("help", flag.ExitOnError),
		UsageCommand: "ginkgo help <COMMAND>",
		Usage: []string{
			"Print usage information. If a command is passed in, print usage information just for that command.",
		},
		Command: printHelp,
	}
}

func printHelp(args []string, additionalArgs []string) {
	if len(args) == 0 {
		usage()
	} else {
		command, found := commandMatching(args[0])
		if !found {
			complainAndQuit(fmt.Sprintf("Unknown command: %s", args[0]))
		}

		usageForCommand(command, true)
	}
}
52
vendor/github.com/onsi/ginkgo/ginkgo/interrupthandler/interrupt_handler.go
generated
vendored
Normal file
@ -0,0 +1,52 @@
package interrupthandler

import (
	"os"
	"os/signal"
	"sync"
	"syscall"
)

type InterruptHandler struct {
	interruptCount int
	lock           *sync.Mutex
	C              chan bool
}

func NewInterruptHandler() *InterruptHandler {
	h := &InterruptHandler{
		lock: &sync.Mutex{},
		C:    make(chan bool, 0),
	}

	go h.handleInterrupt()
	SwallowSigQuit()

	return h
}

func (h *InterruptHandler) WasInterrupted() bool {
	h.lock.Lock()
	defer h.lock.Unlock()

	return h.interruptCount > 0
}

func (h *InterruptHandler) handleInterrupt() {
	c := make(chan os.Signal, 1)
	signal.Notify(c, os.Interrupt, syscall.SIGTERM)

	<-c
	signal.Stop(c)

	h.lock.Lock()
	h.interruptCount++
	if h.interruptCount == 1 {
		close(h.C)
	} else if h.interruptCount > 5 {
		os.Exit(1)
	}
	h.lock.Unlock()

	go h.handleInterrupt()
}
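A minimal usage sketch, assuming it is called from CLI code like the run command further down (it is not part of the vendored file): the handler's `C` channel is closed on the first SIGINT/SIGTERM, so a caller can race suite completion against an interrupt and then check `WasInterrupted`. The "work" goroutine here is a stand-in.

```go
package main

import (
	"fmt"
	"time"

	"github.com/onsi/ginkgo/ginkgo/interrupthandler"
)

func main() {
	h := interrupthandler.NewInterruptHandler()

	done := make(chan struct{})
	go func() {
		time.Sleep(2 * time.Second) // stand-in for running a test suite
		close(done)
	}()

	select {
	case <-done:
	case <-h.C: // closed by handleInterrupt on the first SIGINT/SIGTERM
	}

	if h.WasInterrupted() {
		fmt.Println("run aborted by interrupt")
		return
	}
	fmt.Println("run completed")
}
```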
14
vendor/github.com/onsi/ginkgo/ginkgo/interrupthandler/sigquit_swallower_unix.go
generated
vendored
Normal file
@ -0,0 +1,14 @@
// +build freebsd openbsd netbsd dragonfly darwin linux solaris

package interrupthandler

import (
	"os"
	"os/signal"
	"syscall"
)

func SwallowSigQuit() {
	c := make(chan os.Signal, 1024)
	signal.Notify(c, syscall.SIGQUIT)
}
7
vendor/github.com/onsi/ginkgo/ginkgo/interrupthandler/sigquit_swallower_windows.go
generated
vendored
Normal file
@ -0,0 +1,7 @@
// +build windows

package interrupthandler

func SwallowSigQuit() {
	//noop
}
296
vendor/github.com/onsi/ginkgo/ginkgo/main.go
generated
vendored
Normal file
@ -0,0 +1,296 @@
|
|||||||
|
/*
|
||||||
|
The Ginkgo CLI
|
||||||
|
|
||||||
|
The Ginkgo CLI is fully documented [here](http://onsi.github.io/ginkgo/#the_ginkgo_cli)
|
||||||
|
|
||||||
|
You can also learn more by running:
|
||||||
|
|
||||||
|
ginkgo help
|
||||||
|
|
||||||
|
Here are some of the more commonly used commands:
|
||||||
|
|
||||||
|
To install:
|
||||||
|
|
||||||
|
go install github.com/onsi/ginkgo/ginkgo
|
||||||
|
|
||||||
|
To run tests:
|
||||||
|
|
||||||
|
ginkgo
|
||||||
|
|
||||||
|
To run tests in all subdirectories:
|
||||||
|
|
||||||
|
ginkgo -r
|
||||||
|
|
||||||
|
To run tests in particular packages:
|
||||||
|
|
||||||
|
ginkgo <flags> /path/to/package /path/to/another/package
|
||||||
|
|
||||||
|
To pass arguments/flags to your tests:
|
||||||
|
|
||||||
|
ginkgo <flags> <packages> -- <pass-throughs>
|
||||||
|
|
||||||
|
To run tests in parallel
|
||||||
|
|
||||||
|
ginkgo -p
|
||||||
|
|
||||||
|
this will automatically detect the optimal number of nodes to use. Alternatively, you can specify the number of nodes with:
|
||||||
|
|
||||||
|
ginkgo -nodes=N
|
||||||
|
|
||||||
|
(note that you don't need to provide -p in this case).
|
||||||
|
|
||||||
|
By default the Ginkgo CLI will spin up a server that the individual test processes send test output to. The CLI aggregates this output and then presents coherent test output, one test at a time, as each test completes.
|
||||||
|
An alternative is to have the parallel nodes run and stream interleaved output back. This useful for debugging, particularly in contexts where tests hang/fail to start. To get this interleaved output:
|
||||||
|
|
||||||
|
ginkgo -nodes=N -stream=true
|
||||||
|
|
||||||
|
On windows, the default value for stream is true.
|
||||||
|
|
||||||
|
By default, when running multiple tests (with -r or a list of packages) Ginkgo will abort when a test fails. To have Ginkgo run subsequent test suites instead you can:
|
||||||
|
|
||||||
|
ginkgo -keepGoing
|
||||||
|
|
||||||
|
To monitor packages and rerun tests when changes occur:
|
||||||
|
|
||||||
|
ginkgo watch <-r> </path/to/package>
|
||||||
|
|
||||||
|
passing `ginkgo watch` the `-r` flag will recursively detect all test suites under the current directory and monitor them.
|
||||||
|
`watch` does not detect *new* packages. Moreover, changes in package X only rerun the tests for package X, tests for packages
|
||||||
|
that depend on X are not rerun.
|
||||||
|
|
||||||
|
[OSX & Linux only] To receive (desktop) notifications when a test run completes:
|
||||||
|
|
||||||
|
ginkgo -notify
|
||||||
|
|
||||||
|
this is particularly useful with `ginkgo watch`. Notifications are currently only supported on OS X and require that you `brew install terminal-notifier`
|
||||||
|
|
||||||
|
Sometimes (to suss out race conditions/flakey tests, for example) you want to keep running a test suite until it fails. You can do this with:
|
||||||
|
|
||||||
|
ginkgo -untilItFails
|
||||||
|
|
||||||
|
To bootstrap a test suite:
|
||||||
|
|
||||||
|
ginkgo bootstrap
|
||||||
|
|
||||||
|
To generate a test file:
|
||||||
|
|
||||||
|
ginkgo generate <test_file_name>
|
||||||
|
|
||||||
|
To bootstrap/generate test files without using "." imports:
|
||||||
|
|
||||||
|
ginkgo bootstrap --nodot
|
||||||
|
ginkgo generate --nodot
|
||||||
|
|
||||||
|
this will explicitly export all the identifiers in Ginkgo and Gomega allowing you to rename them to avoid collisions. When you pull to the latest Ginkgo/Gomega you'll want to run
|
||||||
|
|
||||||
|
ginkgo nodot
|
||||||
|
|
||||||
|
to refresh this list and pull in any new identifiers. In particular, this will pull in any new Gomega matchers that get added.
|
||||||
|
|
||||||
|
To convert an existing XUnit style test suite to a Ginkgo-style test suite:
|
||||||
|
|
||||||
|
ginkgo convert .
|
||||||
|
|
||||||
|
To unfocus tests:
|
||||||
|
|
||||||
|
ginkgo unfocus
|
||||||
|
|
||||||
|
or
|
||||||
|
|
||||||
|
ginkgo blur
|
||||||
|
|
||||||
|
To compile a test suite:
|
||||||
|
|
||||||
|
ginkgo build <path-to-package>
|
||||||
|
|
||||||
|
will output an executable file named `package.test`. This can be run directly or by invoking
|
||||||
|
|
||||||
|
ginkgo <path-to-package.test>
|
||||||
|
|
||||||
|
To print out Ginkgo's version:
|
||||||
|
|
||||||
|
ginkgo version
|
||||||
|
|
||||||
|
To get more help:
|
||||||
|
|
||||||
|
ginkgo help
|
||||||
|
*/
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/onsi/ginkgo/config"
|
||||||
|
"github.com/onsi/ginkgo/ginkgo/testsuite"
|
||||||
|
)
|
||||||
|
|
||||||
|
const greenColor = "\x1b[32m"
|
||||||
|
const redColor = "\x1b[91m"
|
||||||
|
const defaultStyle = "\x1b[0m"
|
||||||
|
const lightGrayColor = "\x1b[37m"
|
||||||
|
|
||||||
|
type Command struct {
|
||||||
|
Name string
|
||||||
|
AltName string
|
||||||
|
FlagSet *flag.FlagSet
|
||||||
|
Usage []string
|
||||||
|
UsageCommand string
|
||||||
|
Command func(args []string, additionalArgs []string)
|
||||||
|
SuppressFlagDocumentation bool
|
||||||
|
FlagDocSubstitute []string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *Command) Matches(name string) bool {
|
||||||
|
return c.Name == name || (c.AltName != "" && c.AltName == name)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *Command) Run(args []string, additionalArgs []string) {
|
||||||
|
c.FlagSet.Parse(args)
|
||||||
|
c.Command(c.FlagSet.Args(), additionalArgs)
|
||||||
|
}
|
||||||
|
|
||||||
|
var DefaultCommand *Command
|
||||||
|
var Commands []*Command
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
DefaultCommand = BuildRunCommand()
|
||||||
|
Commands = append(Commands, BuildWatchCommand())
|
||||||
|
Commands = append(Commands, BuildBuildCommand())
|
||||||
|
Commands = append(Commands, BuildBootstrapCommand())
|
||||||
|
Commands = append(Commands, BuildGenerateCommand())
|
||||||
|
Commands = append(Commands, BuildNodotCommand())
|
||||||
|
Commands = append(Commands, BuildConvertCommand())
|
||||||
|
Commands = append(Commands, BuildUnfocusCommand())
|
||||||
|
Commands = append(Commands, BuildVersionCommand())
|
||||||
|
Commands = append(Commands, BuildHelpCommand())
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
args := []string{}
|
||||||
|
additionalArgs := []string{}
|
||||||
|
|
||||||
|
foundDelimiter := false
|
||||||
|
|
||||||
|
for _, arg := range os.Args[1:] {
|
||||||
|
if !foundDelimiter {
|
||||||
|
if arg == "--" {
|
||||||
|
foundDelimiter = true
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if foundDelimiter {
|
||||||
|
additionalArgs = append(additionalArgs, arg)
|
||||||
|
} else {
|
||||||
|
args = append(args, arg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(args) > 0 {
|
||||||
|
commandToRun, found := commandMatching(args[0])
|
||||||
|
if found {
|
||||||
|
commandToRun.Run(args[1:], additionalArgs)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
DefaultCommand.Run(args, additionalArgs)
|
||||||
|
}
|
||||||
|
|
||||||
|
func commandMatching(name string) (*Command, bool) {
|
||||||
|
for _, command := range Commands {
|
||||||
|
if command.Matches(name) {
|
||||||
|
return command, true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil, false
|
||||||
|
}
|
||||||
|
|
||||||
|
func usage() {
|
||||||
|
fmt.Fprintf(os.Stderr, "Ginkgo Version %s\n\n", config.VERSION)
|
||||||
|
usageForCommand(DefaultCommand, false)
|
||||||
|
for _, command := range Commands {
|
||||||
|
fmt.Fprintf(os.Stderr, "\n")
|
||||||
|
usageForCommand(command, false)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func usageForCommand(command *Command, longForm bool) {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s\n%s\n", command.UsageCommand, strings.Repeat("-", len(command.UsageCommand)))
|
||||||
|
fmt.Fprintf(os.Stderr, "%s\n", strings.Join(command.Usage, "\n"))
|
||||||
|
if command.SuppressFlagDocumentation && !longForm {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s\n", strings.Join(command.FlagDocSubstitute, "\n "))
|
||||||
|
} else {
|
||||||
|
command.FlagSet.PrintDefaults()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func complainAndQuit(complaint string) {
|
||||||
|
fmt.Fprintf(os.Stderr, "%s\nFor usage instructions:\n\tginkgo help\n", complaint)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
func findSuites(args []string, recurseForAll bool, skipPackage string, allowPrecompiled bool) ([]testsuite.TestSuite, []string) {
|
||||||
|
suites := []testsuite.TestSuite{}
|
||||||
|
|
||||||
|
if len(args) > 0 {
|
||||||
|
for _, arg := range args {
|
||||||
|
if allowPrecompiled {
|
||||||
|
suite, err := testsuite.PrecompiledTestSuite(arg)
|
||||||
|
if err == nil {
|
||||||
|
suites = append(suites, suite)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
recurseForSuite := recurseForAll
|
||||||
|
if strings.HasSuffix(arg, "/...") && arg != "/..." {
|
||||||
|
arg = arg[:len(arg)-4]
|
||||||
|
recurseForSuite = true
|
||||||
|
}
|
||||||
|
suites = append(suites, testsuite.SuitesInDir(arg, recurseForSuite)...)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
suites = testsuite.SuitesInDir(".", recurseForAll)
|
||||||
|
}
|
||||||
|
|
||||||
|
skippedPackages := []string{}
|
||||||
|
if skipPackage != "" {
|
||||||
|
skipFilters := strings.Split(skipPackage, ",")
|
||||||
|
filteredSuites := []testsuite.TestSuite{}
|
||||||
|
for _, suite := range suites {
|
||||||
|
skip := false
|
||||||
|
for _, skipFilter := range skipFilters {
|
||||||
|
if strings.Contains(suite.Path, skipFilter) {
|
||||||
|
skip = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if skip {
|
||||||
|
skippedPackages = append(skippedPackages, suite.Path)
|
||||||
|
} else {
|
||||||
|
filteredSuites = append(filteredSuites, suite)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
suites = filteredSuites
|
||||||
|
}
|
||||||
|
|
||||||
|
return suites, skippedPackages
|
||||||
|
}
|
||||||
|
|
||||||
|
func goFmt(path string) {
|
||||||
|
err := exec.Command("go", "fmt", path).Run()
|
||||||
|
if err != nil {
|
||||||
|
complainAndQuit("Could not fmt: " + err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func pluralizedWord(singular, plural string, count int) string {
|
||||||
|
if count == 1 {
|
||||||
|
return singular
|
||||||
|
}
|
||||||
|
return plural
|
||||||
|
}
|
194
vendor/github.com/onsi/ginkgo/ginkgo/nodot/nodot.go
generated
vendored
Normal file
@ -0,0 +1,194 @@
|
|||||||
|
package nodot
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/build"
|
||||||
|
"go/parser"
|
||||||
|
"go/token"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
func ApplyNoDot(data []byte) ([]byte, error) {
|
||||||
|
sections, err := generateNodotSections()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, section := range sections {
|
||||||
|
data = section.createOrUpdateIn(data)
|
||||||
|
}
|
||||||
|
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type nodotSection struct {
|
||||||
|
name string
|
||||||
|
pkg string
|
||||||
|
declarations []string
|
||||||
|
types []string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s nodotSection) createOrUpdateIn(data []byte) []byte {
|
||||||
|
renames := map[string]string{}
|
||||||
|
|
||||||
|
contents := string(data)
|
||||||
|
|
||||||
|
lines := strings.Split(contents, "\n")
|
||||||
|
|
||||||
|
comment := "// Declarations for " + s.name
|
||||||
|
|
||||||
|
newLines := []string{}
|
||||||
|
for _, line := range lines {
|
||||||
|
if line == comment {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
words := strings.Split(line, " ")
|
||||||
|
lastWord := words[len(words)-1]
|
||||||
|
|
||||||
|
if s.containsDeclarationOrType(lastWord) {
|
||||||
|
renames[lastWord] = words[1]
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
newLines = append(newLines, line)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(newLines[len(newLines)-1]) > 0 {
|
||||||
|
newLines = append(newLines, "")
|
||||||
|
}
|
||||||
|
|
||||||
|
newLines = append(newLines, comment)
|
||||||
|
|
||||||
|
for _, typ := range s.types {
|
||||||
|
name, ok := renames[s.prefix(typ)]
|
||||||
|
if !ok {
|
||||||
|
name = typ
|
||||||
|
}
|
||||||
|
newLines = append(newLines, fmt.Sprintf("type %s %s", name, s.prefix(typ)))
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, decl := range s.declarations {
|
||||||
|
name, ok := renames[s.prefix(decl)]
|
||||||
|
if !ok {
|
||||||
|
name = decl
|
||||||
|
}
|
||||||
|
newLines = append(newLines, fmt.Sprintf("var %s = %s", name, s.prefix(decl)))
|
||||||
|
}
|
||||||
|
|
||||||
|
newLines = append(newLines, "")
|
||||||
|
|
||||||
|
newContents := strings.Join(newLines, "\n")
|
||||||
|
|
||||||
|
return []byte(newContents)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s nodotSection) prefix(declOrType string) string {
|
||||||
|
return s.pkg + "." + declOrType
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s nodotSection) containsDeclarationOrType(word string) bool {
|
||||||
|
for _, declaration := range s.declarations {
|
||||||
|
if s.prefix(declaration) == word {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, typ := range s.types {
|
||||||
|
if s.prefix(typ) == word {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func generateNodotSections() ([]nodotSection, error) {
|
||||||
|
sections := []nodotSection{}
|
||||||
|
|
||||||
|
declarations, err := getExportedDeclerationsForPackage("github.com/onsi/ginkgo", "ginkgo_dsl.go", "GINKGO_VERSION", "GINKGO_PANIC")
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
sections = append(sections, nodotSection{
|
||||||
|
name: "Ginkgo DSL",
|
||||||
|
pkg: "ginkgo",
|
||||||
|
declarations: declarations,
|
||||||
|
types: []string{"Done", "Benchmarker"},
|
||||||
|
})
|
||||||
|
|
||||||
|
declarations, err = getExportedDeclerationsForPackage("github.com/onsi/gomega", "gomega_dsl.go", "GOMEGA_VERSION")
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
sections = append(sections, nodotSection{
|
||||||
|
name: "Gomega DSL",
|
||||||
|
pkg: "gomega",
|
||||||
|
declarations: declarations,
|
||||||
|
})
|
||||||
|
|
||||||
|
declarations, err = getExportedDeclerationsForPackage("github.com/onsi/gomega", "matchers.go")
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
sections = append(sections, nodotSection{
|
||||||
|
name: "Gomega Matchers",
|
||||||
|
pkg: "gomega",
|
||||||
|
declarations: declarations,
|
||||||
|
})
|
||||||
|
|
||||||
|
return sections, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func getExportedDeclerationsForPackage(pkgPath string, filename string, blacklist ...string) ([]string, error) {
|
||||||
|
pkg, err := build.Import(pkgPath, ".", 0)
|
||||||
|
if err != nil {
|
||||||
|
return []string{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
declarations, err := getExportedDeclarationsForFile(filepath.Join(pkg.Dir, filename))
|
||||||
|
if err != nil {
|
||||||
|
return []string{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
blacklistLookup := map[string]bool{}
|
||||||
|
for _, declaration := range blacklist {
|
||||||
|
blacklistLookup[declaration] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
filteredDeclarations := []string{}
|
||||||
|
for _, declaration := range declarations {
|
||||||
|
if blacklistLookup[declaration] {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
filteredDeclarations = append(filteredDeclarations, declaration)
|
||||||
|
}
|
||||||
|
|
||||||
|
return filteredDeclarations, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func getExportedDeclarationsForFile(path string) ([]string, error) {
|
||||||
|
fset := token.NewFileSet()
|
||||||
|
tree, err := parser.ParseFile(fset, path, nil, 0)
|
||||||
|
if err != nil {
|
||||||
|
return []string{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
declarations := []string{}
|
||||||
|
ast.FileExports(tree)
|
||||||
|
for _, decl := range tree.Decls {
|
||||||
|
switch x := decl.(type) {
|
||||||
|
case *ast.GenDecl:
|
||||||
|
switch s := x.Specs[0].(type) {
|
||||||
|
case *ast.ValueSpec:
|
||||||
|
declarations = append(declarations, s.Names[0].Name)
|
||||||
|
}
|
||||||
|
case *ast.FuncDecl:
|
||||||
|
declarations = append(declarations, x.Name.Name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return declarations, nil
|
||||||
|
}
|
91
vendor/github.com/onsi/ginkgo/ginkgo/nodot/nodot_suite_test.go
generated
vendored
Normal file
@ -0,0 +1,91 @@
|
|||||||
|
package nodot_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/onsi/ginkgo"
|
||||||
|
"github.com/onsi/gomega"
|
||||||
|
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestNodot(t *testing.T) {
|
||||||
|
RegisterFailHandler(Fail)
|
||||||
|
RunSpecs(t, "Nodot Suite")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Declarations for Ginkgo DSL
|
||||||
|
type Done ginkgo.Done
|
||||||
|
type Benchmarker ginkgo.Benchmarker
|
||||||
|
|
||||||
|
var GinkgoWriter = ginkgo.GinkgoWriter
|
||||||
|
var GinkgoParallelNode = ginkgo.GinkgoParallelNode
|
||||||
|
var GinkgoT = ginkgo.GinkgoT
|
||||||
|
var CurrentGinkgoTestDescription = ginkgo.CurrentGinkgoTestDescription
|
||||||
|
var RunSpecs = ginkgo.RunSpecs
|
||||||
|
var RunSpecsWithDefaultAndCustomReporters = ginkgo.RunSpecsWithDefaultAndCustomReporters
|
||||||
|
var RunSpecsWithCustomReporters = ginkgo.RunSpecsWithCustomReporters
|
||||||
|
var Fail = ginkgo.Fail
|
||||||
|
var GinkgoRecover = ginkgo.GinkgoRecover
|
||||||
|
var Describe = ginkgo.Describe
|
||||||
|
var FDescribe = ginkgo.FDescribe
|
||||||
|
var PDescribe = ginkgo.PDescribe
|
||||||
|
var XDescribe = ginkgo.XDescribe
|
||||||
|
var Context = ginkgo.Context
|
||||||
|
var FContext = ginkgo.FContext
|
||||||
|
var PContext = ginkgo.PContext
|
||||||
|
var XContext = ginkgo.XContext
|
||||||
|
var It = ginkgo.It
|
||||||
|
var FIt = ginkgo.FIt
|
||||||
|
var PIt = ginkgo.PIt
|
||||||
|
var XIt = ginkgo.XIt
|
||||||
|
var Measure = ginkgo.Measure
|
||||||
|
var FMeasure = ginkgo.FMeasure
|
||||||
|
var PMeasure = ginkgo.PMeasure
|
||||||
|
var XMeasure = ginkgo.XMeasure
|
||||||
|
var BeforeSuite = ginkgo.BeforeSuite
|
||||||
|
var AfterSuite = ginkgo.AfterSuite
|
||||||
|
var SynchronizedBeforeSuite = ginkgo.SynchronizedBeforeSuite
|
||||||
|
var SynchronizedAfterSuite = ginkgo.SynchronizedAfterSuite
|
||||||
|
var BeforeEach = ginkgo.BeforeEach
|
||||||
|
var JustBeforeEach = ginkgo.JustBeforeEach
|
||||||
|
var AfterEach = ginkgo.AfterEach
|
||||||
|
|
||||||
|
// Declarations for Gomega DSL
|
||||||
|
var RegisterFailHandler = gomega.RegisterFailHandler
|
||||||
|
var RegisterTestingT = gomega.RegisterTestingT
|
||||||
|
var InterceptGomegaFailures = gomega.InterceptGomegaFailures
|
||||||
|
var Ω = gomega.Ω
|
||||||
|
var Expect = gomega.Expect
|
||||||
|
var ExpectWithOffset = gomega.ExpectWithOffset
|
||||||
|
var Eventually = gomega.Eventually
|
||||||
|
var EventuallyWithOffset = gomega.EventuallyWithOffset
|
||||||
|
var Consistently = gomega.Consistently
|
||||||
|
var ConsistentlyWithOffset = gomega.ConsistentlyWithOffset
|
||||||
|
var SetDefaultEventuallyTimeout = gomega.SetDefaultEventuallyTimeout
|
||||||
|
var SetDefaultEventuallyPollingInterval = gomega.SetDefaultEventuallyPollingInterval
|
||||||
|
var SetDefaultConsistentlyDuration = gomega.SetDefaultConsistentlyDuration
|
||||||
|
var SetDefaultConsistentlyPollingInterval = gomega.SetDefaultConsistentlyPollingInterval
|
||||||
|
|
||||||
|
// Declarations for Gomega Matchers
|
||||||
|
var Equal = gomega.Equal
|
||||||
|
var BeEquivalentTo = gomega.BeEquivalentTo
|
||||||
|
var BeNil = gomega.BeNil
|
||||||
|
var BeTrue = gomega.BeTrue
|
||||||
|
var BeFalse = gomega.BeFalse
|
||||||
|
var HaveOccurred = gomega.HaveOccurred
|
||||||
|
var MatchError = gomega.MatchError
|
||||||
|
var BeClosed = gomega.BeClosed
|
||||||
|
var Receive = gomega.Receive
|
||||||
|
var MatchRegexp = gomega.MatchRegexp
|
||||||
|
var ContainSubstring = gomega.ContainSubstring
|
||||||
|
var MatchJSON = gomega.MatchJSON
|
||||||
|
var BeEmpty = gomega.BeEmpty
|
||||||
|
var HaveLen = gomega.HaveLen
|
||||||
|
var BeZero = gomega.BeZero
|
||||||
|
var ContainElement = gomega.ContainElement
|
||||||
|
var ConsistOf = gomega.ConsistOf
|
||||||
|
var HaveKey = gomega.HaveKey
|
||||||
|
var HaveKeyWithValue = gomega.HaveKeyWithValue
|
||||||
|
var BeNumerically = gomega.BeNumerically
|
||||||
|
var BeTemporally = gomega.BeTemporally
|
||||||
|
var BeAssignableToTypeOf = gomega.BeAssignableToTypeOf
|
||||||
|
var Panic = gomega.Panic
|
81
vendor/github.com/onsi/ginkgo/ginkgo/nodot/nodot_test.go
generated
vendored
Normal file
@ -0,0 +1,81 @@
|
|||||||
|
package nodot_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
. "github.com/onsi/ginkgo/ginkgo/nodot"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ = Describe("ApplyNoDot", func() {
|
||||||
|
var result string
|
||||||
|
|
||||||
|
apply := func(input string) string {
|
||||||
|
output, err := ApplyNoDot([]byte(input))
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
return string(output)
|
||||||
|
}
|
||||||
|
|
||||||
|
Context("when no declarations have been imported yet", func() {
|
||||||
|
BeforeEach(func() {
|
||||||
|
result = apply("")
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should add headings for the various declarations", func() {
|
||||||
|
Ω(result).Should(ContainSubstring("// Declarations for Ginkgo DSL"))
|
||||||
|
Ω(result).Should(ContainSubstring("// Declarations for Gomega DSL"))
|
||||||
|
Ω(result).Should(ContainSubstring("// Declarations for Gomega Matchers"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should import Ginkgo's declarations", func() {
|
||||||
|
Ω(result).Should(ContainSubstring("var It = ginkgo.It"))
|
||||||
|
Ω(result).Should(ContainSubstring("var XDescribe = ginkgo.XDescribe"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should import Ginkgo's types", func() {
|
||||||
|
Ω(result).Should(ContainSubstring("type Done ginkgo.Done"))
|
||||||
|
Ω(result).Should(ContainSubstring("type Benchmarker ginkgo.Benchmarker"))
|
||||||
|
Ω(strings.Count(result, "type ")).Should(Equal(2))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should import Gomega's DSL and matchers", func() {
|
||||||
|
Ω(result).Should(ContainSubstring("var Ω = gomega.Ω"))
|
||||||
|
Ω(result).Should(ContainSubstring("var ContainSubstring = gomega.ContainSubstring"))
|
||||||
|
Ω(result).Should(ContainSubstring("var Equal = gomega.Equal"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should not import blacklisted things", func() {
|
||||||
|
Ω(result).ShouldNot(ContainSubstring("GINKGO_VERSION"))
|
||||||
|
Ω(result).ShouldNot(ContainSubstring("GINKGO_PANIC"))
|
||||||
|
Ω(result).ShouldNot(ContainSubstring("GOMEGA_VERSION"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should be idempotent (module empty lines - go fmt can fix those for us)", func() {
|
||||||
|
first := apply("")
|
||||||
|
second := apply(first)
|
||||||
|
first = strings.Trim(first, "\n")
|
||||||
|
second = strings.Trim(second, "\n")
|
||||||
|
Ω(first).Should(Equal(second))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should not mess with other things in the input", func() {
|
||||||
|
result = apply("var MyThing = SomethingThatsMine")
|
||||||
|
Ω(result).Should(ContainSubstring("var MyThing = SomethingThatsMine"))
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when the user has redefined a name", func() {
|
||||||
|
It("should honor the redefinition", func() {
|
||||||
|
result = apply(`
|
||||||
|
var _ = gomega.Ω
|
||||||
|
var When = ginkgo.It
|
||||||
|
`)
|
||||||
|
|
||||||
|
Ω(result).Should(ContainSubstring("var _ = gomega.Ω"))
|
||||||
|
Ω(result).ShouldNot(ContainSubstring("var Ω = gomega.Ω"))
|
||||||
|
|
||||||
|
Ω(result).Should(ContainSubstring("var When = ginkgo.It"))
|
||||||
|
Ω(result).ShouldNot(ContainSubstring("var It = ginkgo.It"))
|
||||||
|
|
||||||
|
Ω(result).Should(ContainSubstring("var Context = ginkgo.Context"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
76
vendor/github.com/onsi/ginkgo/ginkgo/nodot_command.go
generated
vendored
Normal file
@ -0,0 +1,76 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"flag"
|
||||||
|
"github.com/onsi/ginkgo/ginkgo/nodot"
|
||||||
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"regexp"
|
||||||
|
)
|
||||||
|
|
||||||
|
func BuildNodotCommand() *Command {
|
||||||
|
return &Command{
|
||||||
|
Name: "nodot",
|
||||||
|
FlagSet: flag.NewFlagSet("bootstrap", flag.ExitOnError),
|
||||||
|
UsageCommand: "ginkgo nodot",
|
||||||
|
Usage: []string{
|
||||||
|
"Update the nodot declarations in your test suite",
|
||||||
|
"Any missing declarations (from, say, a recently added matcher) will be added to your bootstrap file.",
|
||||||
|
"If you've renamed a declaration, that name will be honored and not overwritten.",
|
||||||
|
},
|
||||||
|
Command: updateNodot,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func updateNodot(args []string, additionalArgs []string) {
|
||||||
|
suiteFile, perm := findSuiteFile()
|
||||||
|
|
||||||
|
data, err := ioutil.ReadFile(suiteFile)
|
||||||
|
if err != nil {
|
||||||
|
complainAndQuit("Failed to update nodot declarations: " + err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
content, err := nodot.ApplyNoDot(data)
|
||||||
|
if err != nil {
|
||||||
|
complainAndQuit("Failed to update nodot declarations: " + err.Error())
|
||||||
|
}
|
||||||
|
ioutil.WriteFile(suiteFile, content, perm)
|
||||||
|
|
||||||
|
goFmt(suiteFile)
|
||||||
|
}
|
||||||
|
|
||||||
|
func findSuiteFile() (string, os.FileMode) {
|
||||||
|
workingDir, err := os.Getwd()
|
||||||
|
if err != nil {
|
||||||
|
complainAndQuit("Could not find suite file for nodot: " + err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
files, err := ioutil.ReadDir(workingDir)
|
||||||
|
if err != nil {
|
||||||
|
complainAndQuit("Could not find suite file for nodot: " + err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
re := regexp.MustCompile(`RunSpecs\(|RunSpecsWithDefaultAndCustomReporters\(|RunSpecsWithCustomReporters\(`)
|
||||||
|
|
||||||
|
for _, file := range files {
|
||||||
|
if file.IsDir() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
path := filepath.Join(workingDir, file.Name())
|
||||||
|
f, err := os.Open(path)
|
||||||
|
if err != nil {
|
||||||
|
complainAndQuit("Could not find suite file for nodot: " + err.Error())
|
||||||
|
}
|
||||||
|
defer f.Close()
|
||||||
|
|
||||||
|
if re.MatchReader(bufio.NewReader(f)) {
|
||||||
|
return path, file.Mode()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
complainAndQuit("Could not find a suite file for nodot: you need a bootstrap file that call's Ginkgo's RunSpecs() command.\nTry running ginkgo bootstrap first.")
|
||||||
|
|
||||||
|
return "", 0
|
||||||
|
}
|
141
vendor/github.com/onsi/ginkgo/ginkgo/notifications.go
generated
vendored
Normal file
@ -0,0 +1,141 @@
package main

import (
	"fmt"
	"os"
	"os/exec"
	"regexp"
	"runtime"
	"strings"

	"github.com/onsi/ginkgo/config"
	"github.com/onsi/ginkgo/ginkgo/testsuite"
)

type Notifier struct {
	commandFlags *RunWatchAndBuildCommandFlags
}

func NewNotifier(commandFlags *RunWatchAndBuildCommandFlags) *Notifier {
	return &Notifier{
		commandFlags: commandFlags,
	}
}

func (n *Notifier) VerifyNotificationsAreAvailable() {
	if n.commandFlags.Notify {
		onLinux := (runtime.GOOS == "linux")
		onOSX := (runtime.GOOS == "darwin")
		if onOSX {

			_, err := exec.LookPath("terminal-notifier")
			if err != nil {
				fmt.Printf(`--notify requires terminal-notifier, which you don't seem to have installed.

OSX:

To remedy this:

brew install terminal-notifier

To learn more about terminal-notifier:

https://github.com/alloy/terminal-notifier
`)
				os.Exit(1)
			}

		} else if onLinux {

			_, err := exec.LookPath("notify-send")
			if err != nil {
				fmt.Printf(`--notify requires terminal-notifier or notify-send, which you don't seem to have installed.

Linux:

Download and install notify-send for your distribution
`)
				os.Exit(1)
			}

		}
	}
}

func (n *Notifier) SendSuiteCompletionNotification(suite testsuite.TestSuite, suitePassed bool) {
	if suitePassed {
		n.SendNotification("Ginkgo [PASS]", fmt.Sprintf(`Test suite for "%s" passed.`, suite.PackageName))
	} else {
		n.SendNotification("Ginkgo [FAIL]", fmt.Sprintf(`Test suite for "%s" failed.`, suite.PackageName))
	}
}

func (n *Notifier) SendNotification(title string, subtitle string) {

	if n.commandFlags.Notify {
		onLinux := (runtime.GOOS == "linux")
		onOSX := (runtime.GOOS == "darwin")

		if onOSX {

			_, err := exec.LookPath("terminal-notifier")
			if err == nil {
				args := []string{"-title", title, "-subtitle", subtitle, "-group", "com.onsi.ginkgo"}
				terminal := os.Getenv("TERM_PROGRAM")
				if terminal == "iTerm.app" {
					args = append(args, "-activate", "com.googlecode.iterm2")
				} else if terminal == "Apple_Terminal" {
					args = append(args, "-activate", "com.apple.Terminal")
				}

				exec.Command("terminal-notifier", args...).Run()
			}

		} else if onLinux {

			_, err := exec.LookPath("notify-send")
			if err == nil {
				args := []string{"-a", "ginkgo", title, subtitle}
				exec.Command("notify-send", args...).Run()
			}

		}
	}
}

func (n *Notifier) RunCommand(suite testsuite.TestSuite, suitePassed bool) {

	command := n.commandFlags.AfterSuiteHook
	if command != "" {

		// Allow for string replacement to pass input to the command
		passed := "[FAIL]"
		if suitePassed {
			passed = "[PASS]"
		}
		command = strings.Replace(command, "(ginkgo-suite-passed)", passed, -1)
		command = strings.Replace(command, "(ginkgo-suite-name)", suite.PackageName, -1)

		// Must break command into parts
		splitArgs := regexp.MustCompile(`'.+'|".+"|\S+`)
		parts := splitArgs.FindAllString(command, -1)

		output, err := exec.Command(parts[0], parts[1:]...).CombinedOutput()
		if err != nil {
			fmt.Println("Post-suite command failed:")
			if config.DefaultReporterConfig.NoColor {
				fmt.Printf("\t%s\n", output)
			} else {
				fmt.Printf("\t%s%s%s\n", redColor, string(output), defaultStyle)
			}
			n.SendNotification("Ginkgo [ERROR]", fmt.Sprintf(`After suite command "%s" failed`, n.commandFlags.AfterSuiteHook))
		} else {
			fmt.Println("Post-suite command succeeded:")
			if config.DefaultReporterConfig.NoColor {
				fmt.Printf("\t%s\n", output)
			} else {
				fmt.Printf("\t%s%s%s\n", greenColor, string(output), defaultStyle)
			}
		}
	}
}
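`RunCommand` above rewrites two placeholders in the `-afterSuiteHook` string before executing it. A minimal, self-contained sketch of that substitution, using a hypothetical hook command and package name (neither comes from this diff):

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Hypothetical -afterSuiteHook value; the placeholder names are the ones
	// RunCommand rewrites before running the hook.
	hook := `notify-send "(ginkgo-suite-name) finished (ginkgo-suite-passed)"`

	passed := "[PASS]" // would be "[FAIL]" for a failing suite
	hook = strings.Replace(hook, "(ginkgo-suite-passed)", passed, -1)
	hook = strings.Replace(hook, "(ginkgo-suite-name)", "books", -1)

	fmt.Println(hook) // notify-send "books finished [PASS]"
}
```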
192
vendor/github.com/onsi/ginkgo/ginkgo/run_command.go
generated
vendored
Normal file
@ -0,0 +1,192 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"math/rand"
|
||||||
|
"os"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/onsi/ginkgo/config"
|
||||||
|
"github.com/onsi/ginkgo/ginkgo/interrupthandler"
|
||||||
|
"github.com/onsi/ginkgo/ginkgo/testrunner"
|
||||||
|
"github.com/onsi/ginkgo/types"
|
||||||
|
)
|
||||||
|
|
||||||
|
func BuildRunCommand() *Command {
|
||||||
|
commandFlags := NewRunCommandFlags(flag.NewFlagSet("ginkgo", flag.ExitOnError))
|
||||||
|
notifier := NewNotifier(commandFlags)
|
||||||
|
interruptHandler := interrupthandler.NewInterruptHandler()
|
||||||
|
runner := &SpecRunner{
|
||||||
|
commandFlags: commandFlags,
|
||||||
|
notifier: notifier,
|
||||||
|
interruptHandler: interruptHandler,
|
||||||
|
suiteRunner: NewSuiteRunner(notifier, interruptHandler),
|
||||||
|
}
|
||||||
|
|
||||||
|
return &Command{
|
||||||
|
Name: "",
|
||||||
|
FlagSet: commandFlags.FlagSet,
|
||||||
|
UsageCommand: "ginkgo <FLAGS> <PACKAGES> -- <PASS-THROUGHS>",
|
||||||
|
Usage: []string{
|
||||||
|
"Run the tests in the passed in <PACKAGES> (or the package in the current directory if left blank).",
|
||||||
|
"Any arguments after -- will be passed to the test.",
|
||||||
|
"Accepts the following flags:",
|
||||||
|
},
|
||||||
|
Command: runner.RunSpecs,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type SpecRunner struct {
|
||||||
|
commandFlags *RunWatchAndBuildCommandFlags
|
||||||
|
notifier *Notifier
|
||||||
|
interruptHandler *interrupthandler.InterruptHandler
|
||||||
|
suiteRunner *SuiteRunner
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *SpecRunner) RunSpecs(args []string, additionalArgs []string) {
|
||||||
|
r.commandFlags.computeNodes()
|
||||||
|
r.notifier.VerifyNotificationsAreAvailable()
|
||||||
|
|
||||||
|
suites, skippedPackages := findSuites(args, r.commandFlags.Recurse, r.commandFlags.SkipPackage, true)
|
||||||
|
if len(skippedPackages) > 0 {
|
||||||
|
fmt.Println("Will skip:")
|
||||||
|
for _, skippedPackage := range skippedPackages {
|
||||||
|
fmt.Println(" " + skippedPackage)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(skippedPackages) > 0 && len(suites) == 0 {
|
||||||
|
fmt.Println("All tests skipped! Exiting...")
|
||||||
|
os.Exit(0)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(suites) == 0 {
|
||||||
|
complainAndQuit("Found no test suites")
|
||||||
|
}
|
||||||
|
|
||||||
|
r.ComputeSuccinctMode(len(suites))
|
||||||
|
|
||||||
|
t := time.Now()
|
||||||
|
|
||||||
|
runners := []*testrunner.TestRunner{}
|
||||||
|
for _, suite := range suites {
|
||||||
|
runners = append(runners, testrunner.New(suite, r.commandFlags.NumCPU, r.commandFlags.ParallelStream, r.commandFlags.GoOpts, additionalArgs))
|
||||||
|
}
|
||||||
|
|
||||||
|
numSuites := 0
|
||||||
|
runResult := testrunner.PassingRunResult()
|
||||||
|
if r.commandFlags.UntilItFails {
|
||||||
|
iteration := 0
|
||||||
|
for {
|
||||||
|
r.UpdateSeed()
|
||||||
|
randomizedRunners := r.randomizeOrder(runners)
|
||||||
|
runResult, numSuites = r.suiteRunner.RunSuites(randomizedRunners, r.commandFlags.NumCompilers, r.commandFlags.KeepGoing, nil)
|
||||||
|
iteration++
|
||||||
|
|
||||||
|
if r.interruptHandler.WasInterrupted() {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
if runResult.Passed {
|
||||||
|
fmt.Printf("\nAll tests passed...\nWill keep running them until they fail.\nThis was attempt #%d\n%s\n", iteration, orcMessage(iteration))
|
||||||
|
} else {
|
||||||
|
fmt.Printf("\nTests failed on attempt #%d\n\n", iteration)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
randomizedRunners := r.randomizeOrder(runners)
|
||||||
|
runResult, numSuites = r.suiteRunner.RunSuites(randomizedRunners, r.commandFlags.NumCompilers, r.commandFlags.KeepGoing, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, runner := range runners {
|
||||||
|
runner.CleanUp()
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("\nGinkgo ran %d %s in %s\n", numSuites, pluralizedWord("suite", "suites", numSuites), time.Since(t))
|
||||||
|
|
||||||
|
if runResult.Passed {
|
||||||
|
if runResult.HasProgrammaticFocus {
|
||||||
|
fmt.Printf("Test Suite Passed\n")
|
||||||
|
fmt.Printf("Detected Programmatic Focus - setting exit status to %d\n", types.GINKGO_FOCUS_EXIT_CODE)
|
||||||
|
os.Exit(types.GINKGO_FOCUS_EXIT_CODE)
|
||||||
|
} else {
|
||||||
|
fmt.Printf("Test Suite Passed\n")
|
||||||
|
os.Exit(0)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
fmt.Printf("Test Suite Failed\n")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *SpecRunner) ComputeSuccinctMode(numSuites int) {
|
||||||
|
if config.DefaultReporterConfig.Verbose {
|
||||||
|
config.DefaultReporterConfig.Succinct = false
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if numSuites == 1 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if numSuites > 1 && !r.commandFlags.wasSet("succinct") {
|
||||||
|
config.DefaultReporterConfig.Succinct = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *SpecRunner) UpdateSeed() {
|
||||||
|
if !r.commandFlags.wasSet("seed") {
|
||||||
|
config.GinkgoConfig.RandomSeed = time.Now().Unix()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *SpecRunner) randomizeOrder(runners []*testrunner.TestRunner) []*testrunner.TestRunner {
|
||||||
|
if !r.commandFlags.RandomizeSuites {
|
||||||
|
return runners
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(runners) <= 1 {
|
||||||
|
return runners
|
||||||
|
}
|
||||||
|
|
||||||
|
randomizedRunners := make([]*testrunner.TestRunner, len(runners))
|
||||||
|
randomizer := rand.New(rand.NewSource(config.GinkgoConfig.RandomSeed))
|
||||||
|
permutation := randomizer.Perm(len(runners))
|
||||||
|
for i, j := range permutation {
|
||||||
|
randomizedRunners[i] = runners[j]
|
||||||
|
}
|
||||||
|
return randomizedRunners
|
||||||
|
}
|
||||||
|
|
||||||
|
func orcMessage(iteration int) string {
|
||||||
|
if iteration < 10 {
|
||||||
|
return ""
|
||||||
|
} else if iteration < 30 {
|
||||||
|
return []string{
|
||||||
|
"If at first you succeed...",
|
||||||
|
"...try, try again.",
|
||||||
|
"Looking good!",
|
||||||
|
"Still good...",
|
||||||
|
"I think your tests are fine....",
|
||||||
|
"Yep, still passing",
|
||||||
|
"Here we go again...",
|
||||||
|
"Even the gophers are getting bored",
|
||||||
|
"Did you try -race?",
|
||||||
|
"Maybe you should stop now?",
|
||||||
|
"I'm getting tired...",
|
||||||
|
"What if I just made you a sandwich?",
|
||||||
|
"Hit ^C, hit ^C, please hit ^C",
|
||||||
|
"Make it stop. Please!",
|
||||||
|
"Come on! Enough is enough!",
|
||||||
|
"Dave, this conversation can serve no purpose anymore. Goodbye.",
|
||||||
|
"Just what do you think you're doing, Dave? ",
|
||||||
|
"I, Sisyphus",
|
||||||
|
"Insanity: doing the same thing over and over again and expecting different results. -Einstein",
|
||||||
|
"I guess Einstein never tried to churn butter",
|
||||||
|
}[iteration-10] + "\n"
|
||||||
|
} else {
|
||||||
|
return "No, seriously... you can probably stop now.\n"
|
||||||
|
}
|
||||||
|
}
|
160
vendor/github.com/onsi/ginkgo/ginkgo/run_watch_and_build_command_flags.go
generated
vendored
Normal file
@ -0,0 +1,160 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"flag"
|
||||||
|
"runtime"
|
||||||
|
|
||||||
|
"github.com/onsi/ginkgo/config"
|
||||||
|
)
|
||||||
|
|
||||||
|
type RunWatchAndBuildCommandFlags struct {
|
||||||
|
Recurse bool
|
||||||
|
SkipPackage string
|
||||||
|
GoOpts map[string]interface{}
|
||||||
|
|
||||||
|
//for run and watch commands
|
||||||
|
NumCPU int
|
||||||
|
NumCompilers int
|
||||||
|
ParallelStream bool
|
||||||
|
Notify bool
|
||||||
|
AfterSuiteHook string
|
||||||
|
AutoNodes bool
|
||||||
|
|
||||||
|
//only for run command
|
||||||
|
KeepGoing bool
|
||||||
|
UntilItFails bool
|
||||||
|
RandomizeSuites bool
|
||||||
|
|
||||||
|
//only for watch command
|
||||||
|
Depth int
|
||||||
|
|
||||||
|
FlagSet *flag.FlagSet
|
||||||
|
}
|
||||||
|
|
||||||
|
const runMode = 1
|
||||||
|
const watchMode = 2
|
||||||
|
const buildMode = 3
|
||||||
|
|
||||||
|
func NewRunCommandFlags(flagSet *flag.FlagSet) *RunWatchAndBuildCommandFlags {
|
||||||
|
c := &RunWatchAndBuildCommandFlags{
|
||||||
|
FlagSet: flagSet,
|
||||||
|
}
|
||||||
|
c.flags(runMode)
|
||||||
|
return c
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewWatchCommandFlags(flagSet *flag.FlagSet) *RunWatchAndBuildCommandFlags {
|
||||||
|
c := &RunWatchAndBuildCommandFlags{
|
||||||
|
FlagSet: flagSet,
|
||||||
|
}
|
||||||
|
c.flags(watchMode)
|
||||||
|
return c
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewBuildCommandFlags(flagSet *flag.FlagSet) *RunWatchAndBuildCommandFlags {
|
||||||
|
c := &RunWatchAndBuildCommandFlags{
|
||||||
|
FlagSet: flagSet,
|
||||||
|
}
|
||||||
|
c.flags(buildMode)
|
||||||
|
return c
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *RunWatchAndBuildCommandFlags) wasSet(flagName string) bool {
|
||||||
|
wasSet := false
|
||||||
|
c.FlagSet.Visit(func(f *flag.Flag) {
|
||||||
|
if f.Name == flagName {
|
||||||
|
wasSet = true
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
return wasSet
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *RunWatchAndBuildCommandFlags) computeNodes() {
|
||||||
|
if c.wasSet("nodes") {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if c.AutoNodes {
|
||||||
|
switch n := runtime.NumCPU(); {
|
||||||
|
case n <= 4:
|
||||||
|
c.NumCPU = n
|
||||||
|
default:
|
||||||
|
c.NumCPU = n - 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *RunWatchAndBuildCommandFlags) stringSlot(slot string) *string {
|
||||||
|
var opt string
|
||||||
|
c.GoOpts[slot] = &opt
|
||||||
|
return &opt
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *RunWatchAndBuildCommandFlags) boolSlot(slot string) *bool {
|
||||||
|
var opt bool
|
||||||
|
c.GoOpts[slot] = &opt
|
||||||
|
return &opt
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *RunWatchAndBuildCommandFlags) intSlot(slot string) *int {
|
||||||
|
var opt int
|
||||||
|
c.GoOpts[slot] = &opt
|
||||||
|
return &opt
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *RunWatchAndBuildCommandFlags) flags(mode int) {
|
||||||
|
c.GoOpts = make(map[string]interface{})
|
||||||
|
|
||||||
|
onWindows := (runtime.GOOS == "windows")
|
||||||
|
|
||||||
|
c.FlagSet.BoolVar(&(c.Recurse), "r", false, "Find and run test suites under the current directory recursively.")
|
||||||
|
c.FlagSet.BoolVar(c.boolSlot("race"), "race", false, "Run tests with race detection enabled.")
|
||||||
|
c.FlagSet.BoolVar(c.boolSlot("cover"), "cover", false, "Run tests with coverage analysis, will generate coverage profiles with the package name in the current directory.")
|
||||||
|
c.FlagSet.StringVar(c.stringSlot("coverpkg"), "coverpkg", "", "Run tests with coverage on the given external modules.")
|
||||||
|
c.FlagSet.StringVar(&(c.SkipPackage), "skipPackage", "", "A comma-separated list of package names to be skipped. If any part of the package's path matches, that package is ignored.")
|
||||||
|
c.FlagSet.StringVar(c.stringSlot("tags"), "tags", "", "A list of build tags to consider satisfied during the build.")
|
||||||
|
c.FlagSet.StringVar(c.stringSlot("gcflags"), "gcflags", "", "Arguments to pass on each go tool compile invocation.")
|
||||||
|
c.FlagSet.StringVar(c.stringSlot("covermode"), "covermode", "", "Set the mode for coverage analysis.")
|
||||||
|
c.FlagSet.BoolVar(c.boolSlot("a"), "a", false, "Force rebuilding of packages that are already up-to-date.")
|
||||||
|
c.FlagSet.BoolVar(c.boolSlot("n"), "n", false, "Have `go test` print the commands but do not run them.")
|
||||||
|
c.FlagSet.BoolVar(c.boolSlot("msan"), "msan", false, "Enable interoperation with memory sanitizer.")
|
||||||
|
c.FlagSet.BoolVar(c.boolSlot("x"), "x", false, "Have `go test` print the commands.")
|
||||||
|
c.FlagSet.BoolVar(c.boolSlot("work"), "work", false, "Print the name of the temporary work directory and do not delete it when exiting.")
|
||||||
|
c.FlagSet.StringVar(c.stringSlot("asmflags"), "asmflags", "", "Arguments to pass on each go tool asm invocation.")
|
||||||
|
c.FlagSet.StringVar(c.stringSlot("buildmode"), "buildmode", "", "Build mode to use. See 'go help buildmode' for more.")
|
||||||
|
c.FlagSet.StringVar(c.stringSlot("compiler"), "compiler", "", "Name of compiler to use, as in runtime.Compiler (gccgo or gc).")
|
||||||
|
c.FlagSet.StringVar(c.stringSlot("gccgoflags"), "gccgoflags", "", "Arguments to pass on each gccgo compiler/linker invocation.")
|
||||||
|
c.FlagSet.StringVar(c.stringSlot("installsuffix"), "installsuffix", "", "A suffix to use in the name of the package installation directory.")
|
||||||
|
c.FlagSet.StringVar(c.stringSlot("ldflags"), "ldflags", "", "Arguments to pass on each go tool link invocation.")
|
||||||
|
c.FlagSet.BoolVar(c.boolSlot("linkshared"), "linkshared", false, "Link against shared libraries previously created with -buildmode=shared.")
|
||||||
|
c.FlagSet.StringVar(c.stringSlot("pkgdir"), "pkgdir", "", "install and load all packages from the given dir instead of the usual locations.")
|
||||||
|
c.FlagSet.StringVar(c.stringSlot("toolexec"), "toolexec", "", "a program to use to invoke toolchain programs like vet and asm.")
|
||||||
|
c.FlagSet.IntVar(c.intSlot("blockprofilerate"), "blockprofilerate", 1, "Control the detail provided in goroutine blocking profiles by calling runtime.SetBlockProfileRate with the given value.")
|
||||||
|
c.FlagSet.StringVar(c.stringSlot("coverprofile"), "coverprofile", "", "Write a coverage profile to the specified file after all tests have passed.")
|
||||||
|
c.FlagSet.StringVar(c.stringSlot("cpuprofile"), "cpuprofile", "", "Write a CPU profile to the specified file before exiting.")
|
||||||
|
c.FlagSet.StringVar(c.stringSlot("memprofile"), "memprofile", "", "Write a memory profile to the specified file after all tests have passed.")
|
||||||
|
c.FlagSet.IntVar(c.intSlot("memprofilerate"), "memprofilerate", 0, "Enable more precise (and expensive) memory profiles by setting runtime.MemProfileRate.")
|
||||||
|
c.FlagSet.StringVar(c.stringSlot("outputdir"), "outputdir", "", "Place output files from profiling in the specified directory.")
|
||||||
|
|
||||||
|
if mode == runMode || mode == watchMode {
|
||||||
|
config.Flags(c.FlagSet, "", false)
|
||||||
|
c.FlagSet.IntVar(&(c.NumCPU), "nodes", 1, "The number of parallel test nodes to run")
|
||||||
|
c.FlagSet.IntVar(&(c.NumCompilers), "compilers", 0, "The number of concurrent compilations to run (0 will autodetect)")
|
||||||
|
c.FlagSet.BoolVar(&(c.AutoNodes), "p", false, "Run in parallel with auto-detected number of nodes")
|
||||||
|
c.FlagSet.BoolVar(&(c.ParallelStream), "stream", onWindows, "stream parallel test output in real time: less coherent, but useful for debugging")
|
||||||
|
if !onWindows {
|
||||||
|
c.FlagSet.BoolVar(&(c.Notify), "notify", false, "Send desktop notifications when a test run completes")
|
||||||
|
}
|
||||||
|
c.FlagSet.StringVar(&(c.AfterSuiteHook), "afterSuiteHook", "", "Run a command when a suite test run completes")
|
||||||
|
}
|
||||||
|
|
||||||
|
if mode == runMode {
|
||||||
|
c.FlagSet.BoolVar(&(c.KeepGoing), "keepGoing", false, "When true, failures from earlier test suites do not prevent later test suites from running")
|
||||||
|
c.FlagSet.BoolVar(&(c.UntilItFails), "untilItFails", false, "When true, Ginkgo will keep rerunning tests until a failure occurs")
|
||||||
|
c.FlagSet.BoolVar(&(c.RandomizeSuites), "randomizeSuites", false, "When true, Ginkgo will randomize the order in which test suites run")
|
||||||
|
}
|
||||||
|
|
||||||
|
if mode == watchMode {
|
||||||
|
c.FlagSet.IntVar(&(c.Depth), "depth", 1, "Ginkgo will watch dependencies down to this depth in the dependency tree")
|
||||||
|
}
|
||||||
|
}
|
173
vendor/github.com/onsi/ginkgo/ginkgo/suite_runner.go
generated
vendored
Normal file
@ -0,0 +1,173 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"runtime"
|
||||||
|
"sync"
|
||||||
|
|
||||||
|
"github.com/onsi/ginkgo/config"
|
||||||
|
"github.com/onsi/ginkgo/ginkgo/interrupthandler"
|
||||||
|
"github.com/onsi/ginkgo/ginkgo/testrunner"
|
||||||
|
"github.com/onsi/ginkgo/ginkgo/testsuite"
|
||||||
|
colorable "github.com/onsi/ginkgo/reporters/stenographer/support/go-colorable"
|
||||||
|
)
|
||||||
|
|
||||||
|
type compilationInput struct {
|
||||||
|
runner *testrunner.TestRunner
|
||||||
|
result chan compilationOutput
|
||||||
|
}
|
||||||
|
|
||||||
|
type compilationOutput struct {
|
||||||
|
runner *testrunner.TestRunner
|
||||||
|
err error
|
||||||
|
}
|
||||||
|
|
||||||
|
type SuiteRunner struct {
|
||||||
|
notifier *Notifier
|
||||||
|
interruptHandler *interrupthandler.InterruptHandler
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewSuiteRunner(notifier *Notifier, interruptHandler *interrupthandler.InterruptHandler) *SuiteRunner {
|
||||||
|
return &SuiteRunner{
|
||||||
|
notifier: notifier,
|
||||||
|
interruptHandler: interruptHandler,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *SuiteRunner) compileInParallel(runners []*testrunner.TestRunner, numCompilers int, willCompile func(suite testsuite.TestSuite)) chan compilationOutput {
|
||||||
|
//we return this to the consumer, it will return each runner in order as it compiles
|
||||||
|
compilationOutputs := make(chan compilationOutput, len(runners))
|
||||||
|
|
||||||
|
//an array of channels - the nth runner's compilation output is sent to the nth channel in this array
|
||||||
|
//we read from these channels in order to ensure we run the suites in order
|
||||||
|
orderedCompilationOutputs := []chan compilationOutput{}
|
||||||
|
for _ = range runners {
|
||||||
|
orderedCompilationOutputs = append(orderedCompilationOutputs, make(chan compilationOutput, 1))
|
||||||
|
}
|
||||||
|
|
||||||
|
//we're going to spin up numCompilers compilers - they're going to run concurrently and will consume this channel
|
||||||
|
//we prefill the channel then close it, this ensures we compile things in the correct order
|
||||||
|
workPool := make(chan compilationInput, len(runners))
|
||||||
|
for i, runner := range runners {
|
||||||
|
workPool <- compilationInput{runner, orderedCompilationOutputs[i]}
|
||||||
|
}
|
||||||
|
close(workPool)
|
||||||
|
|
||||||
|
//pick a reasonable numCompilers
|
||||||
|
if numCompilers == 0 {
|
||||||
|
numCompilers = runtime.NumCPU()
|
||||||
|
}
|
||||||
|
|
||||||
|
//a WaitGroup to help us wait for all compilers to shut down
|
||||||
|
wg := &sync.WaitGroup{}
|
||||||
|
wg.Add(numCompilers)
|
||||||
|
|
||||||
|
//spin up the concurrent compilers
|
||||||
|
for i := 0; i < numCompilers; i++ {
|
||||||
|
go func() {
|
||||||
|
defer wg.Done()
|
||||||
|
for input := range workPool {
|
||||||
|
if r.interruptHandler.WasInterrupted() {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if willCompile != nil {
|
||||||
|
willCompile(input.runner.Suite)
|
||||||
|
}
|
||||||
|
|
||||||
|
//We retry because Go sometimes steps on itself when multiple compiles happen in parallel. This is ugly, but should help resolve flakiness...
|
||||||
|
var err error
|
||||||
|
retries := 0
|
||||||
|
for retries <= 5 {
|
||||||
|
if r.interruptHandler.WasInterrupted() {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if err = input.runner.Compile(); err == nil {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
retries++
|
||||||
|
}
|
||||||
|
|
||||||
|
input.result <- compilationOutput{input.runner, err}
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
}
|
||||||
|
|
||||||
|
//read from the compilation output channels *in order* and send them to the caller
|
||||||
|
//close the compilationOutputs channel to tell the caller we're done
|
||||||
|
go func() {
|
||||||
|
defer close(compilationOutputs)
|
||||||
|
for _, orderedCompilationOutput := range orderedCompilationOutputs {
|
||||||
|
select {
|
||||||
|
case compilationOutput := <-orderedCompilationOutput:
|
||||||
|
compilationOutputs <- compilationOutput
|
||||||
|
case <-r.interruptHandler.C:
|
||||||
|
//interrupt detected, wait for the compilers to shut down then bail
|
||||||
|
//this ensure we clean up after ourselves as we don't leave any compilation processes running
|
||||||
|
wg.Wait()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
return compilationOutputs
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *SuiteRunner) RunSuites(runners []*testrunner.TestRunner, numCompilers int, keepGoing bool, willCompile func(suite testsuite.TestSuite)) (testrunner.RunResult, int) {
|
||||||
|
runResult := testrunner.PassingRunResult()
|
||||||
|
|
||||||
|
compilationOutputs := r.compileInParallel(runners, numCompilers, willCompile)
|
||||||
|
|
||||||
|
numSuitesThatRan := 0
|
||||||
|
suitesThatFailed := []testsuite.TestSuite{}
|
||||||
|
for compilationOutput := range compilationOutputs {
|
||||||
|
if compilationOutput.err != nil {
|
||||||
|
fmt.Print(compilationOutput.err.Error())
|
||||||
|
}
|
||||||
|
numSuitesThatRan++
|
||||||
|
suiteRunResult := testrunner.FailingRunResult()
|
||||||
|
if compilationOutput.err == nil {
|
||||||
|
suiteRunResult = compilationOutput.runner.Run()
|
||||||
|
}
|
||||||
|
r.notifier.SendSuiteCompletionNotification(compilationOutput.runner.Suite, suiteRunResult.Passed)
|
||||||
|
r.notifier.RunCommand(compilationOutput.runner.Suite, suiteRunResult.Passed)
|
||||||
|
runResult = runResult.Merge(suiteRunResult)
|
||||||
|
if !suiteRunResult.Passed {
|
||||||
|
suitesThatFailed = append(suitesThatFailed, compilationOutput.runner.Suite)
|
||||||
|
if !keepGoing {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if numSuitesThatRan < len(runners) && !config.DefaultReporterConfig.Succinct {
|
||||||
|
fmt.Println("")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if keepGoing && !runResult.Passed {
|
||||||
|
r.listFailedSuites(suitesThatFailed)
|
||||||
|
}
|
||||||
|
|
||||||
|
return runResult, numSuitesThatRan
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *SuiteRunner) listFailedSuites(suitesThatFailed []testsuite.TestSuite) {
|
||||||
|
fmt.Println("")
|
||||||
|
fmt.Println("There were failures detected in the following suites:")
|
||||||
|
|
||||||
|
maxPackageNameLength := 0
|
||||||
|
for _, suite := range suitesThatFailed {
|
||||||
|
if len(suite.PackageName) > maxPackageNameLength {
|
||||||
|
maxPackageNameLength = len(suite.PackageName)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
packageNameFormatter := fmt.Sprintf("%%%ds", maxPackageNameLength)
|
||||||
|
|
||||||
|
for _, suite := range suitesThatFailed {
|
||||||
|
if config.DefaultReporterConfig.NoColor {
|
||||||
|
fmt.Printf("\t"+packageNameFormatter+" %s\n", suite.PackageName, suite.Path)
|
||||||
|
} else {
|
||||||
|
fmt.Fprintf(colorable.NewColorableStdout(), "\t%s"+packageNameFormatter+"%s %s%s%s\n", redColor, suite.PackageName, defaultStyle, lightGrayColor, suite.Path, defaultStyle)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
52
vendor/github.com/onsi/ginkgo/ginkgo/testrunner/log_writer.go
generated
vendored
Normal file
@ -0,0 +1,52 @@
package testrunner

import (
	"bytes"
	"fmt"
	"io"
	"log"
	"strings"
	"sync"
)

type logWriter struct {
	buffer *bytes.Buffer
	lock   *sync.Mutex
	log    *log.Logger
}

func newLogWriter(target io.Writer, node int) *logWriter {
	return &logWriter{
		buffer: &bytes.Buffer{},
		lock:   &sync.Mutex{},
		log:    log.New(target, fmt.Sprintf("[%d] ", node), 0),
	}
}

func (w *logWriter) Write(data []byte) (n int, err error) {
	w.lock.Lock()
	defer w.lock.Unlock()

	w.buffer.Write(data)
	contents := w.buffer.String()

	lines := strings.Split(contents, "\n")
	for _, line := range lines[0 : len(lines)-1] {
		w.log.Println(line)
	}

	w.buffer.Reset()
	w.buffer.Write([]byte(lines[len(lines)-1]))
	return len(data), nil
}

func (w *logWriter) Close() error {
	w.lock.Lock()
	defer w.lock.Unlock()

	if w.buffer.Len() > 0 {
		w.log.Println(w.buffer.String())
	}

	return nil
}
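`logWriter` buffers partial writes and flushes only complete lines through a `log.Logger` whose prefix names the parallel node, so interleaved output stays attributable. A small sketch of just that prefixing idea (standalone code; the vendored type itself is unexported):

```go
package main

import (
	"log"
	"os"
)

func main() {
	// One prefixed logger per parallel node, matching the "[<node>] " prefix
	// that newLogWriter builds above.
	node1 := log.New(os.Stdout, "[1] ", 0)
	node2 := log.New(os.Stdout, "[2] ", 0)

	node1.Println("compiling suite...")
	node2.Println("running specs...")
	// Output:
	// [1] compiling suite...
	// [2] running specs...
}
```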
27
vendor/github.com/onsi/ginkgo/ginkgo/testrunner/run_result.go
generated
vendored
Normal file
@ -0,0 +1,27 @@
package testrunner

type RunResult struct {
	Passed               bool
	HasProgrammaticFocus bool
}

func PassingRunResult() RunResult {
	return RunResult{
		Passed:               true,
		HasProgrammaticFocus: false,
	}
}

func FailingRunResult() RunResult {
	return RunResult{
		Passed:               false,
		HasProgrammaticFocus: false,
	}
}

func (r RunResult) Merge(o RunResult) RunResult {
	return RunResult{
		Passed:               r.Passed && o.Passed,
		HasProgrammaticFocus: r.HasProgrammaticFocus || o.HasProgrammaticFocus,
	}
}
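`Merge` folds per-suite results into one aggregate: the run passes only if every suite passed, and programmatic focus sticks once any suite reports it. A self-contained sketch of that fold using local copies of the types (the vendored package is internal to the CLI):

```go
package main

import "fmt"

type runResult struct {
	passed               bool
	hasProgrammaticFocus bool
}

// merge mirrors RunResult.Merge: AND the pass flags, OR the focus flags.
func merge(a, b runResult) runResult {
	return runResult{
		passed:               a.passed && b.passed,
		hasProgrammaticFocus: a.hasProgrammaticFocus || b.hasProgrammaticFocus,
	}
}

func main() {
	overall := runResult{passed: true} // equivalent of PassingRunResult()
	suites := []runResult{
		{passed: true, hasProgrammaticFocus: true},
		{passed: false},
	}
	for _, s := range suites {
		overall = merge(overall, s)
	}
	fmt.Printf("passed=%v focus=%v\n", overall.passed, overall.hasProgrammaticFocus)
	// passed=false focus=true
}
```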
506
vendor/github.com/onsi/ginkgo/ginkgo/testrunner/test_runner.go
generated
vendored
Normal file
@ -0,0 +1,506 @@
|
|||||||
|
package testrunner
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"path/filepath"
|
||||||
|
"regexp"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"syscall"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/onsi/ginkgo/config"
|
||||||
|
"github.com/onsi/ginkgo/ginkgo/testsuite"
|
||||||
|
"github.com/onsi/ginkgo/internal/remote"
|
||||||
|
"github.com/onsi/ginkgo/reporters/stenographer"
|
||||||
|
"github.com/onsi/ginkgo/types"
|
||||||
|
)
|
||||||
|
|
||||||
|
type TestRunner struct {
|
||||||
|
Suite testsuite.TestSuite
|
||||||
|
|
||||||
|
compiled bool
|
||||||
|
compilationTargetPath string
|
||||||
|
|
||||||
|
numCPU int
|
||||||
|
parallelStream bool
|
||||||
|
goOpts map[string]interface{}
|
||||||
|
additionalArgs []string
|
||||||
|
}
|
||||||
|
|
||||||
|
func New(suite testsuite.TestSuite, numCPU int, parallelStream bool, goOpts map[string]interface{}, additionalArgs []string) *TestRunner {
|
||||||
|
runner := &TestRunner{
|
||||||
|
Suite: suite,
|
||||||
|
numCPU: numCPU,
|
||||||
|
parallelStream: parallelStream,
|
||||||
|
goOpts: goOpts,
|
||||||
|
additionalArgs: additionalArgs,
|
||||||
|
}
|
||||||
|
|
||||||
|
if !suite.Precompiled {
|
||||||
|
dir, err := ioutil.TempDir("", "ginkgo")
|
||||||
|
if err != nil {
|
||||||
|
panic(fmt.Sprintf("couldn't create temporary directory... might be time to rm -rf:\n%s", err.Error()))
|
||||||
|
}
|
||||||
|
runner.compilationTargetPath = filepath.Join(dir, suite.PackageName+".test")
|
||||||
|
}
|
||||||
|
|
||||||
|
return runner
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *TestRunner) Compile() error {
|
||||||
|
return t.CompileTo(t.compilationTargetPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *TestRunner) BuildArgs(path string) []string {
|
||||||
|
args := []string{"test", "-c", "-i", "-o", path, t.Suite.Path}
|
||||||
|
|
||||||
|
if *t.goOpts["covermode"].(*string) != "" {
|
||||||
|
args = append(args, "-cover", fmt.Sprintf("-covermode=%s", *t.goOpts["covermode"].(*string)))
|
||||||
|
} else {
|
||||||
|
if *t.goOpts["cover"].(*bool) || *t.goOpts["coverpkg"].(*string) != "" {
|
||||||
|
args = append(args, "-cover", "-covermode=atomic")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
boolOpts := []string{
|
||||||
|
"a",
|
||||||
|
"n",
|
||||||
|
"msan",
|
||||||
|
"race",
|
||||||
|
"x",
|
||||||
|
"work",
|
||||||
|
"linkshared",
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, opt := range boolOpts {
|
||||||
|
if s, found := t.goOpts[opt].(*bool); found && *s {
|
||||||
|
args = append(args, fmt.Sprintf("-%s", opt))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
intOpts := []string{
|
||||||
|
"memprofilerate",
|
||||||
|
"blockprofilerate",
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, opt := range intOpts {
|
||||||
|
if s, found := t.goOpts[opt].(*int); found {
|
||||||
|
args = append(args, fmt.Sprintf("-%s=%d", opt, *s))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
stringOpts := []string{
|
||||||
|
"asmflags",
|
||||||
|
"buildmode",
|
||||||
|
"compiler",
|
||||||
|
"gccgoflags",
|
||||||
|
"installsuffix",
|
||||||
|
"ldflags",
|
||||||
|
"pkgdir",
|
||||||
|
"toolexec",
|
||||||
|
"coverprofile",
|
||||||
|
"cpuprofile",
|
||||||
|
"memprofile",
|
||||||
|
"outputdir",
|
||||||
|
"coverpkg",
|
||||||
|
"tags",
|
||||||
|
"gcflags",
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, opt := range stringOpts {
|
||||||
|
if s, found := t.goOpts[opt].(*string); found && *s != "" {
|
||||||
|
args = append(args, fmt.Sprintf("-%s=%s", opt, *s))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return args
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *TestRunner) CompileTo(path string) error {
|
||||||
|
if t.compiled {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if t.Suite.Precompiled {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
args := t.BuildArgs(path)
|
||||||
|
cmd := exec.Command("go", args...)
|
||||||
|
|
||||||
|
output, err := cmd.CombinedOutput()
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
fixedOutput := fixCompilationOutput(string(output), t.Suite.Path)
|
||||||
|
if len(output) > 0 {
|
||||||
|
return fmt.Errorf("Failed to compile %s:\n\n%s", t.Suite.PackageName, fixedOutput)
|
||||||
|
}
|
||||||
|
return fmt.Errorf("Failed to compile %s", t.Suite.PackageName)
|
||||||
|
}
|
||||||
|
|
||||||
|
if fileExists(path) == false {
|
||||||
|
compiledFile := t.Suite.PackageName + ".test"
|
||||||
|
if fileExists(compiledFile) {
|
||||||
|
// seems like we are on an old go version that does not support the -o flag on go test
|
||||||
|
// move the compiled test file to the desired location by hand
|
||||||
|
err = os.Rename(compiledFile, path)
|
||||||
|
if err != nil {
|
||||||
|
// We cannot move the file, perhaps because the source and destination
|
||||||
|
// are on different partitions. We can copy the file, however.
|
||||||
|
err = copyFile(compiledFile, path)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("Failed to copy compiled file: %s", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return fmt.Errorf("Failed to compile %s: output file %q could not be found", t.Suite.PackageName, path)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
t.compiled = true
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func fileExists(path string) bool {
|
||||||
|
_, err := os.Stat(path)
|
||||||
|
return err == nil || os.IsNotExist(err) == false
|
||||||
|
}
|
||||||
|
|
||||||
|
// copyFile copies the contents of the file named src to the file named
|
||||||
|
// by dst. The file will be created if it does not already exist. If the
|
||||||
|
// destination file exists, all it's contents will be replaced by the contents
|
||||||
|
// of the source file.
|
||||||
|
func copyFile(src, dst string) error {
|
||||||
|
srcInfo, err := os.Stat(src)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
mode := srcInfo.Mode()
|
||||||
|
|
||||||
|
in, err := os.Open(src)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
defer in.Close()
|
||||||
|
|
||||||
|
out, err := os.Create(dst)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
defer func() {
|
||||||
|
closeErr := out.Close()
|
||||||
|
if err == nil {
|
||||||
|
err = closeErr
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
_, err = io.Copy(out, in)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
err = out.Sync()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return out.Chmod(mode)
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
go test -c -i spits package.test out into the cwd. there's no way to change this.
|
||||||
|
|
||||||
|
to make sure it doesn't generate conflicting .test files in the cwd, Compile() must switch the cwd to the test package.
|
||||||
|
|
||||||
|
unfortunately, this causes go test's compile output to be expressed *relative to the test package* instead of the cwd.
|
||||||
|
|
||||||
|
this makes it hard to reason about what failed, and also prevents iterm's Cmd+click from working.
|
||||||
|
|
||||||
|
fixCompilationOutput..... rewrites the output to fix the paths.
|
||||||
|
|
||||||
|
yeah......
|
||||||
|
*/
|
||||||
|
func fixCompilationOutput(output string, relToPath string) string {
|
||||||
|
relToPath = filepath.Join(relToPath)
|
||||||
|
re := regexp.MustCompile(`^(\S.*\.go)\:\d+\:`)
|
||||||
|
lines := strings.Split(output, "\n")
|
||||||
|
for i, line := range lines {
|
||||||
|
indices := re.FindStringSubmatchIndex(line)
|
||||||
|
if len(indices) == 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
path := line[indices[2]:indices[3]]
|
||||||
|
if filepath.Dir(path) != relToPath {
|
||||||
|
path = filepath.Join(relToPath, path)
|
||||||
|
lines[i] = path + line[indices[3]:]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return strings.Join(lines, "\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *TestRunner) Run() RunResult {
|
||||||
|
if t.Suite.IsGinkgo {
|
||||||
|
if t.numCPU > 1 {
|
||||||
|
if t.parallelStream {
|
||||||
|
return t.runAndStreamParallelGinkgoSuite()
|
||||||
|
} else {
|
||||||
|
return t.runParallelGinkgoSuite()
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return t.runSerialGinkgoSuite()
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return t.runGoTestSuite()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *TestRunner) CleanUp() {
|
||||||
|
if t.Suite.Precompiled {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
os.RemoveAll(filepath.Dir(t.compilationTargetPath))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *TestRunner) runSerialGinkgoSuite() RunResult {
|
||||||
|
ginkgoArgs := config.BuildFlagArgs("ginkgo", config.GinkgoConfig, config.DefaultReporterConfig)
|
||||||
|
return t.run(t.cmd(ginkgoArgs, os.Stdout, 1), nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *TestRunner) runGoTestSuite() RunResult {
|
||||||
|
return t.run(t.cmd([]string{"-test.v"}, os.Stdout, 1), nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *TestRunner) runAndStreamParallelGinkgoSuite() RunResult {
|
||||||
|
completions := make(chan RunResult)
|
||||||
|
writers := make([]*logWriter, t.numCPU)
|
||||||
|
|
||||||
|
server, err := remote.NewServer(t.numCPU)
|
||||||
|
if err != nil {
|
||||||
|
panic("Failed to start parallel spec server")
|
||||||
|
}
|
||||||
|
|
||||||
|
server.Start()
|
||||||
|
defer server.Close()
|
||||||
|
|
||||||
|
for cpu := 0; cpu < t.numCPU; cpu++ {
|
||||||
|
config.GinkgoConfig.ParallelNode = cpu + 1
|
||||||
|
config.GinkgoConfig.ParallelTotal = t.numCPU
|
||||||
|
config.GinkgoConfig.SyncHost = server.Address()
|
||||||
|
|
||||||
|
ginkgoArgs := config.BuildFlagArgs("ginkgo", config.GinkgoConfig, config.DefaultReporterConfig)
|
||||||
|
|
||||||
|
writers[cpu] = newLogWriter(os.Stdout, cpu+1)
|
||||||
|
|
||||||
|
cmd := t.cmd(ginkgoArgs, writers[cpu], cpu+1)
|
||||||
|
|
||||||
|
server.RegisterAlive(cpu+1, func() bool {
|
||||||
|
if cmd.ProcessState == nil {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return !cmd.ProcessState.Exited()
|
||||||
|
})
|
||||||
|
|
||||||
|
go t.run(cmd, completions)
|
||||||
|
}
|
||||||
|
|
||||||
|
res := PassingRunResult()
|
||||||
|
|
||||||
|
for cpu := 0; cpu < t.numCPU; cpu++ {
|
||||||
|
res = res.Merge(<-completions)
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, writer := range writers {
|
||||||
|
writer.Close()
|
||||||
|
}
|
||||||
|
|
||||||
|
os.Stdout.Sync()
|
||||||
|
|
||||||
|
if *t.goOpts["cover"].(*bool) || *t.goOpts["coverpkg"].(*string) != "" || *t.goOpts["covermode"].(*string) != "" {
|
||||||
|
t.combineCoverprofiles()
|
||||||
|
}
|
||||||
|
|
||||||
|
return res
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *TestRunner) runParallelGinkgoSuite() RunResult {
|
||||||
|
result := make(chan bool)
|
||||||
|
completions := make(chan RunResult)
|
||||||
|
writers := make([]*logWriter, t.numCPU)
|
||||||
|
reports := make([]*bytes.Buffer, t.numCPU)
|
||||||
|
|
||||||
|
stenographer := stenographer.New(!config.DefaultReporterConfig.NoColor, config.GinkgoConfig.FlakeAttempts > 1)
|
||||||
|
aggregator := remote.NewAggregator(t.numCPU, result, config.DefaultReporterConfig, stenographer)
|
||||||
|
|
||||||
|
server, err := remote.NewServer(t.numCPU)
|
||||||
|
if err != nil {
|
||||||
|
panic("Failed to start parallel spec server")
|
||||||
|
}
|
||||||
|
server.RegisterReporters(aggregator)
|
||||||
|
server.Start()
|
||||||
|
defer server.Close()
|
||||||
|
|
||||||
|
for cpu := 0; cpu < t.numCPU; cpu++ {
|
||||||
|
config.GinkgoConfig.ParallelNode = cpu + 1
|
||||||
|
config.GinkgoConfig.ParallelTotal = t.numCPU
|
||||||
|
config.GinkgoConfig.SyncHost = server.Address()
|
||||||
|
config.GinkgoConfig.StreamHost = server.Address()
|
||||||
|
|
||||||
|
ginkgoArgs := config.BuildFlagArgs("ginkgo", config.GinkgoConfig, config.DefaultReporterConfig)
|
||||||
|
|
||||||
|
reports[cpu] = &bytes.Buffer{}
|
||||||
|
writers[cpu] = newLogWriter(reports[cpu], cpu+1)
|
||||||
|
|
||||||
|
cmd := t.cmd(ginkgoArgs, writers[cpu], cpu+1)
|
||||||
|
|
||||||
|
server.RegisterAlive(cpu+1, func() bool {
|
||||||
|
if cmd.ProcessState == nil {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return !cmd.ProcessState.Exited()
|
||||||
|
})
|
||||||
|
|
||||||
|
go t.run(cmd, completions)
|
||||||
|
}
|
||||||
|
|
||||||
|
res := PassingRunResult()
|
||||||
|
|
||||||
|
for cpu := 0; cpu < t.numCPU; cpu++ {
|
||||||
|
res = res.Merge(<-completions)
|
||||||
|
}
|
||||||
|
|
||||||
|
//all test processes are done, at this point
|
||||||
|
//we should be able to wait for the aggregator to tell us that it's done
|
||||||
|
|
||||||
|
select {
|
||||||
|
case <-result:
|
||||||
|
fmt.Println("")
|
||||||
|
case <-time.After(time.Second):
|
||||||
|
//the aggregator never got back to us! something must have gone wrong
|
||||||
|
fmt.Println(`
|
||||||
|
-------------------------------------------------------------------
|
||||||
|
| |
|
||||||
|
| Ginkgo timed out waiting for all parallel nodes to report back! |
|
||||||
|
| |
|
||||||
|
-------------------------------------------------------------------
|
||||||
|
`)
|
||||||
|
|
||||||
|
os.Stdout.Sync()
|
||||||
|
|
||||||
|
for _, writer := range writers {
|
||||||
|
writer.Close()
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, report := range reports {
|
||||||
|
fmt.Print(report.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
os.Stdout.Sync()
|
||||||
|
}
|
||||||
|
|
||||||
|
if *t.goOpts["cover"].(*bool) || *t.goOpts["coverpkg"].(*string) != "" || *t.goOpts["covermode"].(*string) != "" {
|
||||||
|
t.combineCoverprofiles()
|
||||||
|
}
|
||||||
|
|
||||||
|
return res
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *TestRunner) cmd(ginkgoArgs []string, stream io.Writer, node int) *exec.Cmd {
|
||||||
|
args := []string{"--test.timeout=24h"}
|
||||||
|
if *t.goOpts["cover"].(*bool) || *t.goOpts["coverpkg"].(*string) != "" || *t.goOpts["covermode"].(*string) != "" {
|
||||||
|
coverprofile := "--test.coverprofile=" + t.Suite.PackageName + ".coverprofile"
|
||||||
|
if t.numCPU > 1 {
|
||||||
|
coverprofile = fmt.Sprintf("%s.%d", coverprofile, node)
|
||||||
|
}
|
||||||
|
args = append(args, coverprofile)
|
||||||
|
}
|
||||||
|
|
||||||
|
args = append(args, ginkgoArgs...)
|
||||||
|
args = append(args, t.additionalArgs...)
|
||||||
|
|
||||||
|
path := t.compilationTargetPath
|
||||||
|
if t.Suite.Precompiled {
|
||||||
|
path, _ = filepath.Abs(filepath.Join(t.Suite.Path, fmt.Sprintf("%s.test", t.Suite.PackageName)))
|
||||||
|
}
|
||||||
|
|
||||||
|
cmd := exec.Command(path, args...)
|
||||||
|
|
||||||
|
cmd.Dir = t.Suite.Path
|
||||||
|
cmd.Stderr = stream
|
||||||
|
cmd.Stdout = stream
|
||||||
|
|
||||||
|
return cmd
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *TestRunner) run(cmd *exec.Cmd, completions chan RunResult) RunResult {
|
||||||
|
var res RunResult
|
||||||
|
|
||||||
|
defer func() {
|
||||||
|
if completions != nil {
|
||||||
|
completions <- res
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
err := cmd.Start()
|
||||||
|
if err != nil {
|
||||||
|
fmt.Printf("Failed to run test suite!\n\t%s", err.Error())
|
||||||
|
return res
|
||||||
|
}
|
||||||
|
|
||||||
|
cmd.Wait()
|
||||||
|
exitStatus := cmd.ProcessState.Sys().(syscall.WaitStatus).ExitStatus()
|
||||||
|
res.Passed = (exitStatus == 0) || (exitStatus == types.GINKGO_FOCUS_EXIT_CODE)
|
||||||
|
res.HasProgrammaticFocus = (exitStatus == types.GINKGO_FOCUS_EXIT_CODE)
|
||||||
|
|
||||||
|
return res
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *TestRunner) combineCoverprofiles() {
|
||||||
|
profiles := []string{}
|
||||||
|
for cpu := 1; cpu <= t.numCPU; cpu++ {
|
||||||
|
coverFile := fmt.Sprintf("%s.coverprofile.%d", t.Suite.PackageName, cpu)
|
||||||
|
coverFile = filepath.Join(t.Suite.Path, coverFile)
|
||||||
|
coverProfile, err := ioutil.ReadFile(coverFile)
|
||||||
|
os.Remove(coverFile)
|
||||||
|
|
||||||
|
if err == nil {
|
||||||
|
profiles = append(profiles, string(coverProfile))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(profiles) != t.numCPU {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
lines := map[string]int{}
|
||||||
|
lineOrder := []string{}
|
||||||
|
for i, coverProfile := range profiles {
|
||||||
|
for _, line := range strings.Split(string(coverProfile), "\n")[1:] {
|
||||||
|
if len(line) == 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
components := strings.Split(line, " ")
|
||||||
|
count, _ := strconv.Atoi(components[len(components)-1])
|
||||||
|
prefix := strings.Join(components[0:len(components)-1], " ")
|
||||||
|
lines[prefix] += count
|
||||||
|
if i == 0 {
|
||||||
|
lineOrder = append(lineOrder, prefix)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
output := []string{"mode: atomic"}
|
||||||
|
for _, line := range lineOrder {
|
||||||
|
output = append(output, fmt.Sprintf("%s %d", line, lines[line]))
|
||||||
|
}
|
||||||
|
finalOutput := strings.Join(output, "\n")
|
||||||
|
ioutil.WriteFile(filepath.Join(t.Suite.Path, fmt.Sprintf("%s.coverprofile", t.Suite.PackageName)), []byte(finalOutput), 0666)
|
||||||
|
}
|
56
vendor/github.com/onsi/ginkgo/ginkgo/testrunner/test_runner_test.go
generated
vendored
Normal file
@ -0,0 +1,56 @@
package testrunner_test

import (
	. "github.com/onsi/ginkgo"
	"github.com/onsi/ginkgo/ginkgo/testrunner"
	"github.com/onsi/ginkgo/ginkgo/testsuite"
	. "github.com/onsi/gomega"
	"testing"
)

func strAddr(s string) interface{} {
	return &s
}

func boolAddr(s bool) interface{} {
	return &s
}

func intAddr(s int) interface{} {
	return &s
}

var _ = Describe("TestRunner", func() {
	It("should pass through go opts", func() {
		//var opts map[string]interface{}
		opts := map[string]interface{}{
			"asmflags":         strAddr("a"),
			"pkgdir":           strAddr("b"),
			"gcflags":          strAddr("c"),
			"covermode":        strAddr(""),
			"coverpkg":         strAddr(""),
			"cover":            boolAddr(false),
			"blockprofilerate": intAddr(100),
		}
		tr := testrunner.New(testsuite.TestSuite{}, 1, false, opts, []string{})

		args := tr.BuildArgs(".")
		Ω(args).Should(Equal([]string{
			"test",
			"-c",
			"-i",
			"-o",
			".",
			"",
			"-blockprofilerate=100",
			"-asmflags=a",
			"-pkgdir=b",
			"-gcflags=c",
		}))
	})
})

func TestTestRunner(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Test Runner Suite")
}
111
vendor/github.com/onsi/ginkgo/ginkgo/testsuite/test_suite.go
generated
vendored
Normal file
@ -0,0 +1,111 @@
package testsuite

import (
	"errors"
	"io/ioutil"
	"os"
	"path/filepath"
	"regexp"
	"strings"
)

type TestSuite struct {
	Path        string
	PackageName string
	IsGinkgo    bool
	Precompiled bool
}

func PrecompiledTestSuite(path string) (TestSuite, error) {
	info, err := os.Stat(path)
	if err != nil {
		return TestSuite{}, err
	}

	if info.IsDir() {
		return TestSuite{}, errors.New("this is a directory, not a file")
	}

	if filepath.Ext(path) != ".test" {
		return TestSuite{}, errors.New("this is not a .test binary")
	}

	if info.Mode()&0111 == 0 {
		return TestSuite{}, errors.New("this is not executable")
	}

	dir := relPath(filepath.Dir(path))
	packageName := strings.TrimSuffix(filepath.Base(path), filepath.Ext(path))

	return TestSuite{
		Path:        dir,
		PackageName: packageName,
		IsGinkgo:    true,
		Precompiled: true,
	}, nil
}

func SuitesInDir(dir string, recurse bool) []TestSuite {
	suites := []TestSuite{}

	if vendorExperimentCheck(dir) {
		return suites
	}

	files, _ := ioutil.ReadDir(dir)
	re := regexp.MustCompile(`_test\.go$`)
	for _, file := range files {
		if !file.IsDir() && re.Match([]byte(file.Name())) {
			suites = append(suites, New(dir, files))
			break
		}
	}

	if recurse {
		re = regexp.MustCompile(`^[._]`)
		for _, file := range files {
			if file.IsDir() && !re.Match([]byte(file.Name())) {
				suites = append(suites, SuitesInDir(dir+"/"+file.Name(), recurse)...)
			}
		}
	}

	return suites
}

func relPath(dir string) string {
	dir, _ = filepath.Abs(dir)
	cwd, _ := os.Getwd()
	dir, _ = filepath.Rel(cwd, filepath.Clean(dir))
	dir = "." + string(filepath.Separator) + dir
	return dir
}

func New(dir string, files []os.FileInfo) TestSuite {
	return TestSuite{
		Path:        relPath(dir),
		PackageName: packageNameForSuite(dir),
		IsGinkgo:    filesHaveGinkgoSuite(dir, files),
	}
}

func packageNameForSuite(dir string) string {
	path, _ := filepath.Abs(dir)
	return filepath.Base(path)
}

func filesHaveGinkgoSuite(dir string, files []os.FileInfo) bool {
	reTestFile := regexp.MustCompile(`_test\.go$`)
	reGinkgo := regexp.MustCompile(`package ginkgo|\/ginkgo"`)

	for _, file := range files {
		if !file.IsDir() && reTestFile.Match([]byte(file.Name())) {
			contents, _ := ioutil.ReadFile(dir + "/" + file.Name())
			if reGinkgo.Match(contents) {
				return true
			}
		}
	}

	return false
}
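Suite discovery above reduces to two regular expressions: `_test\.go$` decides whether a directory contains tests at all, and `package ginkgo|\/ginkgo"` decides whether those tests are Ginkgo tests. A quick sketch of only those checks against made-up inputs:

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same patterns used by SuitesInDir and filesHaveGinkgoSuite above.
	reTestFile := regexp.MustCompile(`_test\.go$`)
	reGinkgo := regexp.MustCompile(`package ginkgo|\/ginkgo"`)

	fmt.Println(reTestFile.MatchString("books_test.go"))                   // true
	fmt.Println(reGinkgo.MatchString(`import . "github.com/onsi/ginkgo"`)) // true
	fmt.Println(reGinkgo.MatchString(`import "testing"`))                  // false
}
```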
13
vendor/github.com/onsi/ginkgo/ginkgo/testsuite/testsuite_suite_test.go
generated
vendored
Normal file
@ -0,0 +1,13 @@
package testsuite_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"

	"testing"
)

func TestTestsuite(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Testsuite Suite")
}
202
vendor/github.com/onsi/ginkgo/ginkgo/testsuite/testsuite_test.go
generated
vendored
Normal file
@ -0,0 +1,202 @@
|
|||||||
|
// +build go1.6
|
||||||
|
|
||||||
|
package testsuite_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
|
||||||
|
. "github.com/onsi/ginkgo"
|
||||||
|
. "github.com/onsi/ginkgo/ginkgo/testsuite"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ = Describe("TestSuite", func() {
|
||||||
|
var tmpDir string
|
||||||
|
var relTmpDir string
|
||||||
|
|
||||||
|
writeFile := func(folder string, filename string, content string, mode os.FileMode) {
|
||||||
|
path := filepath.Join(tmpDir, folder)
|
||||||
|
err := os.MkdirAll(path, 0700)
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
|
||||||
|
path = filepath.Join(path, filename)
|
||||||
|
ioutil.WriteFile(path, []byte(content), mode)
|
||||||
|
}
|
||||||
|
|
||||||
|
var origVendor string
|
||||||
|
|
||||||
|
BeforeSuite(func() {
|
||||||
|
origVendor = os.Getenv("GO15VENDOREXPERIMENT")
|
||||||
|
})
|
||||||
|
|
||||||
|
AfterSuite(func() {
|
||||||
|
os.Setenv("GO15VENDOREXPERIMENT", origVendor)
|
||||||
|
})
|
||||||
|
|
||||||
|
BeforeEach(func() {
|
||||||
|
var err error
|
||||||
|
tmpDir, err = ioutil.TempDir("/tmp", "ginkgo")
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
|
||||||
|
cwd, err := os.Getwd()
|
		Ω(err).ShouldNot(HaveOccurred())
		relTmpDir, err = filepath.Rel(cwd, tmpDir)
		relTmpDir = "./" + relTmpDir
		Ω(err).ShouldNot(HaveOccurred())

		//go files in the root directory (no tests)
		writeFile("/", "main.go", "package main", 0666)

		//non-go files in a nested directory
		writeFile("/redherring", "big_test.jpg", "package ginkgo", 0666)

		//non-ginkgo tests in a nested directory
		writeFile("/professorplum", "professorplum_test.go", `import "testing"`, 0666)

		//ginkgo tests in a nested directory
		writeFile("/colonelmustard", "colonelmustard_test.go", `import "github.com/onsi/ginkgo"`, 0666)

		//ginkgo tests in a deeply nested directory
		writeFile("/colonelmustard/library", "library_test.go", `import "github.com/onsi/ginkgo"`, 0666)

		//ginkgo tests deeply nested in a vendored dependency
		writeFile("/vendor/mrspeacock/lounge", "lounge_test.go", `import "github.com/onsi/ginkgo"`, 0666)

		//a precompiled ginkgo test
		writeFile("/precompiled-dir", "precompiled.test", `fake-binary-file`, 0777)
		writeFile("/precompiled-dir", "some-other-binary", `fake-binary-file`, 0777)
		writeFile("/precompiled-dir", "nonexecutable.test", `fake-binary-file`, 0666)
	})

	AfterEach(func() {
		os.RemoveAll(tmpDir)
	})

	Describe("Finding precompiled test suites", func() {
		Context("if pointed at an executable file that ends with .test", func() {
			It("should return a precompiled test suite", func() {
				suite, err := PrecompiledTestSuite(filepath.Join(tmpDir, "precompiled-dir", "precompiled.test"))
				Ω(err).ShouldNot(HaveOccurred())
				Ω(suite).Should(Equal(TestSuite{
					Path: relTmpDir + "/precompiled-dir",
					PackageName: "precompiled",
					IsGinkgo: true,
					Precompiled: true,
				}))
			})
		})

		Context("if pointed at a directory", func() {
			It("should error", func() {
				suite, err := PrecompiledTestSuite(filepath.Join(tmpDir, "precompiled-dir"))
				Ω(suite).Should(BeZero())
				Ω(err).Should(HaveOccurred())
			})
		})

		Context("if pointed at an executable that doesn't have .test", func() {
			It("should error", func() {
				suite, err := PrecompiledTestSuite(filepath.Join(tmpDir, "precompiled-dir", "some-other-binary"))
				Ω(suite).Should(BeZero())
				Ω(err).Should(HaveOccurred())
			})
		})

		Context("if pointed at a .test that isn't executable", func() {
			It("should error", func() {
				suite, err := PrecompiledTestSuite(filepath.Join(tmpDir, "precompiled-dir", "nonexecutable.test"))
				Ω(suite).Should(BeZero())
				Ω(err).Should(HaveOccurred())
			})
		})

		Context("if pointed at a nonexisting file", func() {
			It("should error", func() {
				suite, err := PrecompiledTestSuite(filepath.Join(tmpDir, "precompiled-dir", "nope-nothing-to-see-here"))
				Ω(suite).Should(BeZero())
				Ω(err).Should(HaveOccurred())
			})
		})
	})

	Describe("scanning for suites in a directory", func() {
		Context("when there are no tests in the specified directory", func() {
			It("should come up empty", func() {
				suites := SuitesInDir(tmpDir, false)
				Ω(suites).Should(BeEmpty())
			})
		})

		Context("when there are ginkgo tests in the specified directory", func() {
			It("should return an appropriately configured suite", func() {
				suites := SuitesInDir(filepath.Join(tmpDir, "colonelmustard"), false)
				Ω(suites).Should(HaveLen(1))

				Ω(suites[0].Path).Should(Equal(relTmpDir + "/colonelmustard"))
				Ω(suites[0].PackageName).Should(Equal("colonelmustard"))
				Ω(suites[0].IsGinkgo).Should(BeTrue())
				Ω(suites[0].Precompiled).Should(BeFalse())
			})
		})

		Context("when there are non-ginkgo tests in the specified directory", func() {
			It("should return an appropriately configured suite", func() {
				suites := SuitesInDir(filepath.Join(tmpDir, "professorplum"), false)
				Ω(suites).Should(HaveLen(1))

				Ω(suites[0].Path).Should(Equal(relTmpDir + "/professorplum"))
				Ω(suites[0].PackageName).Should(Equal("professorplum"))
				Ω(suites[0].IsGinkgo).Should(BeFalse())
				Ω(suites[0].Precompiled).Should(BeFalse())
			})
		})

		Context("given GO15VENDOREXPERIMENT disabled", func() {
			BeforeEach(func() {
				os.Setenv("GO15VENDOREXPERIMENT", "0")
			})

			AfterEach(func() {
				os.Setenv("GO15VENDOREXPERIMENT", "")
			})

			It("should not skip vendor dirs", func() {
				suites := SuitesInDir(filepath.Join(tmpDir+"/vendor"), true)
				Ω(suites).Should(HaveLen(1))
			})

			It("should recurse into vendor dirs", func() {
				suites := SuitesInDir(filepath.Join(tmpDir), true)
				Ω(suites).Should(HaveLen(4))
			})
		})

		Context("when recursively scanning", func() {
			It("should return suites for corresponding test suites, only", func() {
				suites := SuitesInDir(tmpDir, true)
				Ω(suites).Should(HaveLen(3))

				Ω(suites).Should(ContainElement(TestSuite{
					Path: relTmpDir + "/colonelmustard",
					PackageName: "colonelmustard",
					IsGinkgo: true,
					Precompiled: false,
				}))
				Ω(suites).Should(ContainElement(TestSuite{
					Path: relTmpDir + "/professorplum",
					PackageName: "professorplum",
					IsGinkgo: false,
					Precompiled: false,
				}))
				Ω(suites).Should(ContainElement(TestSuite{
					Path: relTmpDir + "/colonelmustard/library",
					PackageName: "library",
					IsGinkgo: true,
					Precompiled: false,
				}))
			})
		})
	})
})
16
vendor/github.com/onsi/ginkgo/ginkgo/testsuite/vendor_check_go15.go
generated
vendored
Normal file
@ -0,0 +1,16 @@
// +build !go1.6

package testsuite

import (
	"os"
	"path"
)

// "This change will only be enabled if the go command is run with
// GO15VENDOREXPERIMENT=1 in its environment."
// c.f. the vendor-experiment proposal https://goo.gl/2ucMeC
func vendorExperimentCheck(dir string) bool {
	vendorExperiment := os.Getenv("GO15VENDOREXPERIMENT")
	return vendorExperiment == "1" && path.Base(dir) == "vendor"
}
202
vendor/github.com/onsi/ginkgo/ginkgo/testsuite/vendor_check_go15_test.go
generated
vendored
Normal file
@ -0,0 +1,202 @@
// +build !go1.6

package testsuite_test

import (
	"io/ioutil"
	"os"
	"path/filepath"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/ginkgo/ginkgo/testsuite"
	. "github.com/onsi/gomega"
)

var _ = Describe("TestSuite", func() {
	var tmpDir string
	var relTmpDir string

	writeFile := func(folder string, filename string, content string, mode os.FileMode) {
		path := filepath.Join(tmpDir, folder)
		err := os.MkdirAll(path, 0700)
		Ω(err).ShouldNot(HaveOccurred())

		path = filepath.Join(path, filename)
		ioutil.WriteFile(path, []byte(content), mode)
	}

	var origVendor string

	BeforeSuite(func() {
		origVendor = os.Getenv("GO15VENDOREXPERIMENT")
	})

	AfterSuite(func() {
		os.Setenv("GO15VENDOREXPERIMENT", origVendor)
	})

	BeforeEach(func() {
		var err error
		tmpDir, err = ioutil.TempDir("/tmp", "ginkgo")
		Ω(err).ShouldNot(HaveOccurred())

		cwd, err := os.Getwd()
		Ω(err).ShouldNot(HaveOccurred())
		relTmpDir, err = filepath.Rel(cwd, tmpDir)
		relTmpDir = "./" + relTmpDir
		Ω(err).ShouldNot(HaveOccurred())

		//go files in the root directory (no tests)
		writeFile("/", "main.go", "package main", 0666)

		//non-go files in a nested directory
		writeFile("/redherring", "big_test.jpg", "package ginkgo", 0666)

		//non-ginkgo tests in a nested directory
		writeFile("/professorplum", "professorplum_test.go", `import "testing"`, 0666)

		//ginkgo tests in a nested directory
		writeFile("/colonelmustard", "colonelmustard_test.go", `import "github.com/onsi/ginkgo"`, 0666)

		//ginkgo tests in a deeply nested directory
		writeFile("/colonelmustard/library", "library_test.go", `import "github.com/onsi/ginkgo"`, 0666)

		//ginkgo tests deeply nested in a vendored dependency
		writeFile("/vendor/mrspeacock/lounge", "lounge_test.go", `import "github.com/onsi/ginkgo"`, 0666)

		//a precompiled ginkgo test
		writeFile("/precompiled-dir", "precompiled.test", `fake-binary-file`, 0777)
		writeFile("/precompiled-dir", "some-other-binary", `fake-binary-file`, 0777)
		writeFile("/precompiled-dir", "nonexecutable.test", `fake-binary-file`, 0666)
	})

	AfterEach(func() {
		os.RemoveAll(tmpDir)
	})

	Describe("Finding precompiled test suites", func() {
		Context("if pointed at an executable file that ends with .test", func() {
			It("should return a precompiled test suite", func() {
				suite, err := PrecompiledTestSuite(filepath.Join(tmpDir, "precompiled-dir", "precompiled.test"))
				Ω(err).ShouldNot(HaveOccurred())
				Ω(suite).Should(Equal(TestSuite{
					Path: relTmpDir + "/precompiled-dir",
					PackageName: "precompiled",
					IsGinkgo: true,
					Precompiled: true,
				}))
			})
		})

		Context("if pointed at a directory", func() {
			It("should error", func() {
				suite, err := PrecompiledTestSuite(filepath.Join(tmpDir, "precompiled-dir"))
				Ω(suite).Should(BeZero())
				Ω(err).Should(HaveOccurred())
			})
		})

		Context("if pointed at an executable that doesn't have .test", func() {
			It("should error", func() {
				suite, err := PrecompiledTestSuite(filepath.Join(tmpDir, "precompiled-dir", "some-other-binary"))
				Ω(suite).Should(BeZero())
				Ω(err).Should(HaveOccurred())
			})
		})

		Context("if pointed at a .test that isn't executable", func() {
			It("should error", func() {
				suite, err := PrecompiledTestSuite(filepath.Join(tmpDir, "precompiled-dir", "nonexecutable.test"))
				Ω(suite).Should(BeZero())
				Ω(err).Should(HaveOccurred())
			})
		})

		Context("if pointed at a nonexisting file", func() {
			It("should error", func() {
				suite, err := PrecompiledTestSuite(filepath.Join(tmpDir, "precompiled-dir", "nope-nothing-to-see-here"))
				Ω(suite).Should(BeZero())
				Ω(err).Should(HaveOccurred())
			})
		})
	})

	Describe("scanning for suites in a directory", func() {
		Context("when there are no tests in the specified directory", func() {
			It("should come up empty", func() {
				suites := SuitesInDir(tmpDir, false)
				Ω(suites).Should(BeEmpty())
			})
		})

		Context("when there are ginkgo tests in the specified directory", func() {
			It("should return an appropriately configured suite", func() {
				suites := SuitesInDir(filepath.Join(tmpDir, "colonelmustard"), false)
				Ω(suites).Should(HaveLen(1))

				Ω(suites[0].Path).Should(Equal(relTmpDir + "/colonelmustard"))
				Ω(suites[0].PackageName).Should(Equal("colonelmustard"))
				Ω(suites[0].IsGinkgo).Should(BeTrue())
				Ω(suites[0].Precompiled).Should(BeFalse())
			})
		})

		Context("when there are non-ginkgo tests in the specified directory", func() {
			It("should return an appropriately configured suite", func() {
				suites := SuitesInDir(filepath.Join(tmpDir, "professorplum"), false)
				Ω(suites).Should(HaveLen(1))

				Ω(suites[0].Path).Should(Equal(relTmpDir + "/professorplum"))
				Ω(suites[0].PackageName).Should(Equal("professorplum"))
				Ω(suites[0].IsGinkgo).Should(BeFalse())
				Ω(suites[0].Precompiled).Should(BeFalse())
			})
		})

		Context("given GO15VENDOREXPERIMENT", func() {
			BeforeEach(func() {
				os.Setenv("GO15VENDOREXPERIMENT", "1")
			})

			AfterEach(func() {
				os.Setenv("GO15VENDOREXPERIMENT", "")
			})

			It("should skip vendor dirs", func() {
				suites := SuitesInDir(filepath.Join(tmpDir+"/vendor"), false)
				Ω(suites).Should(HaveLen(0))
			})

			It("should not recurse into vendor dirs", func() {
				suites := SuitesInDir(filepath.Join(tmpDir), true)
				Ω(suites).Should(HaveLen(3))
			})
		})

		Context("when recursively scanning", func() {
			It("should return suites for corresponding test suites, only", func() {
				suites := SuitesInDir(tmpDir, true)
				Ω(suites).Should(HaveLen(4))

				Ω(suites).Should(ContainElement(TestSuite{
					Path: relTmpDir + "/colonelmustard",
					PackageName: "colonelmustard",
					IsGinkgo: true,
					Precompiled: false,
				}))
				Ω(suites).Should(ContainElement(TestSuite{
					Path: relTmpDir + "/professorplum",
					PackageName: "professorplum",
					IsGinkgo: false,
					Precompiled: false,
				}))
				Ω(suites).Should(ContainElement(TestSuite{
					Path: relTmpDir + "/colonelmustard/library",
					PackageName: "library",
					IsGinkgo: true,
					Precompiled: false,
				}))
			})
		})
	})
})
15
vendor/github.com/onsi/ginkgo/ginkgo/testsuite/vendor_check_go16.go
generated
vendored
Normal file
@ -0,0 +1,15 @@
// +build go1.6

package testsuite

import (
	"os"
	"path"
)

// in 1.6 the vendor directory became the default go behaviour, so now
// check if its disabled.
func vendorExperimentCheck(dir string) bool {
	vendorExperiment := os.Getenv("GO15VENDOREXPERIMENT")
	return vendorExperiment != "0" && path.Base(dir) == "vendor"
}
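For orientation (not part of the vendored diff): the two build-tagged files above both provide `vendorExperimentCheck`, which the exported `SuitesInDir` scanner uses to decide whether a `vendor` directory should be skipped. A minimal caller sketch, assuming only the API exercised by the tests in this diff:

```go
package main

import (
	"fmt"

	"github.com/onsi/ginkgo/ginkgo/testsuite"
)

func main() {
	// Recursively scan the current tree for test suites. Under Go 1.6+
	// (or GO15VENDOREXPERIMENT=1 on Go 1.5) vendor/ directories are skipped.
	suites := testsuite.SuitesInDir(".", true)
	for _, s := range suites {
		fmt.Println(s.PackageName, s.Path, s.IsGinkgo, s.Precompiled)
	}
}
```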
38
vendor/github.com/onsi/ginkgo/ginkgo/unfocus_command.go
generated
vendored
Normal file
@ -0,0 +1,38 @@
package main

import (
	"flag"
	"fmt"
	"os/exec"
)

func BuildUnfocusCommand() *Command {
	return &Command{
		Name: "unfocus",
		AltName: "blur",
		FlagSet: flag.NewFlagSet("unfocus", flag.ExitOnError),
		UsageCommand: "ginkgo unfocus (or ginkgo blur)",
		Usage: []string{
			"Recursively unfocuses any focused tests under the current directory",
		},
		Command: unfocusSpecs,
	}
}

func unfocusSpecs([]string, []string) {
	unfocus("Describe")
	unfocus("Context")
	unfocus("It")
	unfocus("Measure")
	unfocus("DescribeTable")
	unfocus("Entry")
}

func unfocus(component string) {
	fmt.Printf("Removing F%s...\n", component)
	cmd := exec.Command("gofmt", fmt.Sprintf("-r=F%s -> %s", component, component), "-w", ".")
	out, _ := cmd.CombinedOutput()
	if string(out) != "" {
		println(string(out))
	}
}
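The unfocus command above works by shelling out to gofmt's rewrite-rule flag. A minimal standalone sketch of the same rewrite for a single component (assumes gofmt is on PATH):

```go
package main

import (
	"fmt"
	"os/exec"
)

func main() {
	// Rewrite every focused FIt(...) back to It(...) throughout the current tree.
	cmd := exec.Command("gofmt", "-r=FIt -> It", "-w", ".")
	if out, err := cmd.CombinedOutput(); err != nil {
		fmt.Println(err, string(out))
	}
}
```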
23
vendor/github.com/onsi/ginkgo/ginkgo/version_command.go
generated
vendored
Normal file
@ -0,0 +1,23 @@
package main

import (
	"flag"
	"fmt"
	"github.com/onsi/ginkgo/config"
)

func BuildVersionCommand() *Command {
	return &Command{
		Name: "version",
		FlagSet: flag.NewFlagSet("version", flag.ExitOnError),
		UsageCommand: "ginkgo version",
		Usage: []string{
			"Print Ginkgo's version",
		},
		Command: printVersion,
	}
}

func printVersion([]string, []string) {
	fmt.Printf("Ginkgo Version %s\n", config.VERSION)
}
22
vendor/github.com/onsi/ginkgo/ginkgo/watch/delta.go
generated
vendored
Normal file
@ -0,0 +1,22 @@
package watch

import "sort"

type Delta struct {
	ModifiedPackages []string

	NewSuites []*Suite
	RemovedSuites []*Suite
	modifiedSuites []*Suite
}

type DescendingByDelta []*Suite

func (a DescendingByDelta) Len() int { return len(a) }
func (a DescendingByDelta) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
func (a DescendingByDelta) Less(i, j int) bool { return a[i].Delta() > a[j].Delta() }

func (d Delta) ModifiedSuites() []*Suite {
	sort.Sort(DescendingByDelta(d.modifiedSuites))
	return d.modifiedSuites
}
71
vendor/github.com/onsi/ginkgo/ginkgo/watch/delta_tracker.go
generated
vendored
Normal file
@ -0,0 +1,71 @@
package watch

import (
	"fmt"

	"github.com/onsi/ginkgo/ginkgo/testsuite"
)

type SuiteErrors map[testsuite.TestSuite]error

type DeltaTracker struct {
	maxDepth int
	suites map[string]*Suite
	packageHashes *PackageHashes
}

func NewDeltaTracker(maxDepth int) *DeltaTracker {
	return &DeltaTracker{
		maxDepth: maxDepth,
		packageHashes: NewPackageHashes(),
		suites: map[string]*Suite{},
	}
}

func (d *DeltaTracker) Delta(suites []testsuite.TestSuite) (delta Delta, errors SuiteErrors) {
	errors = SuiteErrors{}
	delta.ModifiedPackages = d.packageHashes.CheckForChanges()

	providedSuitePaths := map[string]bool{}
	for _, suite := range suites {
		providedSuitePaths[suite.Path] = true
	}

	d.packageHashes.StartTrackingUsage()

	for _, suite := range d.suites {
		if providedSuitePaths[suite.Suite.Path] {
			if suite.Delta() > 0 {
				delta.modifiedSuites = append(delta.modifiedSuites, suite)
			}
		} else {
			delta.RemovedSuites = append(delta.RemovedSuites, suite)
		}
	}

	d.packageHashes.StopTrackingUsageAndPrune()

	for _, suite := range suites {
		_, ok := d.suites[suite.Path]
		if !ok {
			s, err := NewSuite(suite, d.maxDepth, d.packageHashes)
			if err != nil {
				errors[suite] = err
				continue
			}
			d.suites[suite.Path] = s
			delta.NewSuites = append(delta.NewSuites, s)
		}
	}

	return delta, errors
}

func (d *DeltaTracker) WillRun(suite testsuite.TestSuite) error {
	s, ok := d.suites[suite.Path]
	if !ok {
		return fmt.Errorf("unknown suite %s", suite.Path)
	}

	return s.MarkAsRunAndRecomputedDependencies(d.maxDepth)
}
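A minimal sketch of how a caller (the watch command, further down in this diff) drives DeltaTracker: build it once, then repeatedly ask for a Delta against the current list of suites:

```go
package main

import (
	"fmt"

	"github.com/onsi/ginkgo/ginkgo/testsuite"
	"github.com/onsi/ginkgo/ginkgo/watch"
)

func main() {
	tracker := watch.NewDeltaTracker(2) // track dependencies two imports deep
	suites := testsuite.SuitesInDir(".", true)

	delta, errs := tracker.Delta(suites)
	for suite, err := range errs {
		fmt.Printf("could not watch %s: %s\n", suite.PackageName, err)
	}
	fmt.Println("new suites:", len(delta.NewSuites))
	fmt.Println("modified suites:", len(delta.ModifiedSuites()))
}
```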
91
vendor/github.com/onsi/ginkgo/ginkgo/watch/dependencies.go
generated
vendored
Normal file
@ -0,0 +1,91 @@
package watch

import (
	"go/build"
	"regexp"
)

var ginkgoAndGomegaFilter = regexp.MustCompile(`github\.com/onsi/ginkgo|github\.com/onsi/gomega`)

type Dependencies struct {
	deps map[string]int
}

func NewDependencies(path string, maxDepth int) (Dependencies, error) {
	d := Dependencies{
		deps: map[string]int{},
	}

	if maxDepth == 0 {
		return d, nil
	}

	err := d.seedWithDepsForPackageAtPath(path)
	if err != nil {
		return d, err
	}

	for depth := 1; depth < maxDepth; depth++ {
		n := len(d.deps)
		d.addDepsForDepth(depth)
		if n == len(d.deps) {
			break
		}
	}

	return d, nil
}

func (d Dependencies) Dependencies() map[string]int {
	return d.deps
}

func (d Dependencies) seedWithDepsForPackageAtPath(path string) error {
	pkg, err := build.ImportDir(path, 0)
	if err != nil {
		return err
	}

	d.resolveAndAdd(pkg.Imports, 1)
	d.resolveAndAdd(pkg.TestImports, 1)
	d.resolveAndAdd(pkg.XTestImports, 1)

	delete(d.deps, pkg.Dir)
	return nil
}

func (d Dependencies) addDepsForDepth(depth int) {
	for dep, depDepth := range d.deps {
		if depDepth == depth {
			d.addDepsForDep(dep, depth+1)
		}
	}
}

func (d Dependencies) addDepsForDep(dep string, depth int) {
	pkg, err := build.ImportDir(dep, 0)
	if err != nil {
		println(err.Error())
		return
	}
	d.resolveAndAdd(pkg.Imports, depth)
}

func (d Dependencies) resolveAndAdd(deps []string, depth int) {
	for _, dep := range deps {
		pkg, err := build.Import(dep, ".", 0)
		if err != nil {
			continue
		}
		if pkg.Goroot == false && !ginkgoAndGomegaFilter.Match([]byte(pkg.Dir)) {
			d.addDepIfNotPresent(pkg.Dir, depth)
		}
	}
}

func (d Dependencies) addDepIfNotPresent(dep string, depth int) {
	_, ok := d.deps[dep]
	if !ok {
		d.deps[dep] = depth
	}
}
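A minimal sketch of the standard-library call the file above is built around: go/build's ImportDir lists a package's imports, which NewDependencies then resolves breadth-first up to maxDepth:

```go
package main

import (
	"fmt"
	"go/build"
)

func main() {
	pkg, err := build.ImportDir(".", 0)
	if err != nil {
		panic(err)
	}
	fmt.Println(pkg.Imports)     // imports of the package's code
	fmt.Println(pkg.TestImports) // additional imports of its _test.go files
}
```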
103
vendor/github.com/onsi/ginkgo/ginkgo/watch/package_hash.go
generated
vendored
Normal file
@ -0,0 +1,103 @@
package watch

import (
	"fmt"
	"io/ioutil"
	"os"
	"regexp"
	"time"
)

var goRegExp = regexp.MustCompile(`\.go$`)
var goTestRegExp = regexp.MustCompile(`_test\.go$`)

type PackageHash struct {
	CodeModifiedTime time.Time
	TestModifiedTime time.Time
	Deleted bool

	path string
	codeHash string
	testHash string
}

func NewPackageHash(path string) *PackageHash {
	p := &PackageHash{
		path: path,
	}

	p.codeHash, _, p.testHash, _, p.Deleted = p.computeHashes()

	return p
}

func (p *PackageHash) CheckForChanges() bool {
	codeHash, codeModifiedTime, testHash, testModifiedTime, deleted := p.computeHashes()

	if deleted {
		if p.Deleted == false {
			t := time.Now()
			p.CodeModifiedTime = t
			p.TestModifiedTime = t
		}
		p.Deleted = true
		return true
	}

	modified := false
	p.Deleted = false

	if p.codeHash != codeHash {
		p.CodeModifiedTime = codeModifiedTime
		modified = true
	}
	if p.testHash != testHash {
		p.TestModifiedTime = testModifiedTime
		modified = true
	}

	p.codeHash = codeHash
	p.testHash = testHash
	return modified
}

func (p *PackageHash) computeHashes() (codeHash string, codeModifiedTime time.Time, testHash string, testModifiedTime time.Time, deleted bool) {
	infos, err := ioutil.ReadDir(p.path)

	if err != nil {
		deleted = true
		return
	}

	for _, info := range infos {
		if info.IsDir() {
			continue
		}

		if goTestRegExp.Match([]byte(info.Name())) {
			testHash += p.hashForFileInfo(info)
			if info.ModTime().After(testModifiedTime) {
				testModifiedTime = info.ModTime()
			}
			continue
		}

		if goRegExp.Match([]byte(info.Name())) {
			codeHash += p.hashForFileInfo(info)
			if info.ModTime().After(codeModifiedTime) {
				codeModifiedTime = info.ModTime()
			}
		}
	}

	testHash += codeHash
	if codeModifiedTime.After(testModifiedTime) {
		testModifiedTime = codeModifiedTime
	}

	return
}

func (p *PackageHash) hashForFileInfo(info os.FileInfo) string {
	return fmt.Sprintf("%s_%d_%d", info.Name(), info.Size(), info.ModTime().UnixNano())
}
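A minimal usage sketch of the type above: a PackageHash watches one directory and reports whether any .go file changed (by name, size or mtime) since the previous check. The path "./mypackage" is a hypothetical placeholder:

```go
package main

import (
	"fmt"
	"time"

	"github.com/onsi/ginkgo/ginkgo/watch"
)

func main() {
	h := watch.NewPackageHash("./mypackage") // hypothetical package directory
	for {
		time.Sleep(time.Second)
		if h.CheckForChanges() {
			fmt.Println("code modified at", h.CodeModifiedTime, "- tests modified at", h.TestModifiedTime)
		}
	}
}
```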
82
vendor/github.com/onsi/ginkgo/ginkgo/watch/package_hashes.go
generated
vendored
Normal file
@ -0,0 +1,82 @@
package watch

import (
	"path/filepath"
	"sync"
)

type PackageHashes struct {
	PackageHashes map[string]*PackageHash
	usedPaths map[string]bool
	lock *sync.Mutex
}

func NewPackageHashes() *PackageHashes {
	return &PackageHashes{
		PackageHashes: map[string]*PackageHash{},
		usedPaths: nil,
		lock: &sync.Mutex{},
	}
}

func (p *PackageHashes) CheckForChanges() []string {
	p.lock.Lock()
	defer p.lock.Unlock()

	modified := []string{}

	for _, packageHash := range p.PackageHashes {
		if packageHash.CheckForChanges() {
			modified = append(modified, packageHash.path)
		}
	}

	return modified
}

func (p *PackageHashes) Add(path string) *PackageHash {
	p.lock.Lock()
	defer p.lock.Unlock()

	path, _ = filepath.Abs(path)
	_, ok := p.PackageHashes[path]
	if !ok {
		p.PackageHashes[path] = NewPackageHash(path)
	}

	if p.usedPaths != nil {
		p.usedPaths[path] = true
	}
	return p.PackageHashes[path]
}

func (p *PackageHashes) Get(path string) *PackageHash {
	p.lock.Lock()
	defer p.lock.Unlock()

	path, _ = filepath.Abs(path)
	if p.usedPaths != nil {
		p.usedPaths[path] = true
	}
	return p.PackageHashes[path]
}

func (p *PackageHashes) StartTrackingUsage() {
	p.lock.Lock()
	defer p.lock.Unlock()

	p.usedPaths = map[string]bool{}
}

func (p *PackageHashes) StopTrackingUsageAndPrune() {
	p.lock.Lock()
	defer p.lock.Unlock()

	for path := range p.PackageHashes {
		if !p.usedPaths[path] {
			delete(p.PackageHashes, path)
		}
	}

	p.usedPaths = nil
}
87
vendor/github.com/onsi/ginkgo/ginkgo/watch/suite.go
generated
vendored
Normal file
@ -0,0 +1,87 @@
package watch

import (
	"fmt"
	"math"
	"time"

	"github.com/onsi/ginkgo/ginkgo/testsuite"
)

type Suite struct {
	Suite testsuite.TestSuite
	RunTime time.Time
	Dependencies Dependencies

	sharedPackageHashes *PackageHashes
}

func NewSuite(suite testsuite.TestSuite, maxDepth int, sharedPackageHashes *PackageHashes) (*Suite, error) {
	deps, err := NewDependencies(suite.Path, maxDepth)
	if err != nil {
		return nil, err
	}

	sharedPackageHashes.Add(suite.Path)
	for dep := range deps.Dependencies() {
		sharedPackageHashes.Add(dep)
	}

	return &Suite{
		Suite: suite,
		Dependencies: deps,

		sharedPackageHashes: sharedPackageHashes,
	}, nil
}

func (s *Suite) Delta() float64 {
	delta := s.delta(s.Suite.Path, true, 0) * 1000
	for dep, depth := range s.Dependencies.Dependencies() {
		delta += s.delta(dep, false, depth)
	}
	return delta
}

func (s *Suite) MarkAsRunAndRecomputedDependencies(maxDepth int) error {
	s.RunTime = time.Now()

	deps, err := NewDependencies(s.Suite.Path, maxDepth)
	if err != nil {
		return err
	}

	s.sharedPackageHashes.Add(s.Suite.Path)
	for dep := range deps.Dependencies() {
		s.sharedPackageHashes.Add(dep)
	}

	s.Dependencies = deps

	return nil
}

func (s *Suite) Description() string {
	numDeps := len(s.Dependencies.Dependencies())
	pluralizer := "ies"
	if numDeps == 1 {
		pluralizer = "y"
	}
	return fmt.Sprintf("%s [%d dependenc%s]", s.Suite.Path, numDeps, pluralizer)
}

func (s *Suite) delta(packagePath string, includeTests bool, depth int) float64 {
	return math.Max(float64(s.dt(packagePath, includeTests)), 0) / float64(depth+1)
}

func (s *Suite) dt(packagePath string, includeTests bool) time.Duration {
	packageHash := s.sharedPackageHashes.Get(packagePath)
	var modifiedTime time.Time
	if includeTests {
		modifiedTime = packageHash.TestModifiedTime
	} else {
		modifiedTime = packageHash.CodeModifiedTime
	}

	return modifiedTime.Sub(s.RunTime)
}
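A minimal sketch of the weighting used by Suite.Delta above: time since the last run is clamped at zero and discounted by import depth, and the suite's own package (depth 0, tests included) is additionally multiplied by 1000 so direct changes always dominate the ordering:

```go
package main

import "fmt"

// weight mirrors s.delta: max(dt, 0) / (depth + 1), with dt in arbitrary units.
func weight(sinceLastModification float64, depth int) float64 {
	if sinceLastModification < 0 {
		sinceLastModification = 0
	}
	return sinceLastModification / float64(depth+1)
}

func main() {
	own := weight(5, 0) * 1000 // the suite's own package
	dep := weight(5, 2)        // a dependency two imports away
	fmt.Println(own > dep)     // true: the suite's own changes sort it first
}
```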
174
vendor/github.com/onsi/ginkgo/ginkgo/watch_command.go
generated
vendored
Normal file
@ -0,0 +1,174 @@
package main

import (
	"flag"
	"fmt"
	"time"

	"github.com/onsi/ginkgo/config"
	"github.com/onsi/ginkgo/ginkgo/interrupthandler"
	"github.com/onsi/ginkgo/ginkgo/testrunner"
	"github.com/onsi/ginkgo/ginkgo/testsuite"
	"github.com/onsi/ginkgo/ginkgo/watch"
	colorable "github.com/onsi/ginkgo/reporters/stenographer/support/go-colorable"
)

func BuildWatchCommand() *Command {
	commandFlags := NewWatchCommandFlags(flag.NewFlagSet("watch", flag.ExitOnError))
	interruptHandler := interrupthandler.NewInterruptHandler()
	notifier := NewNotifier(commandFlags)
	watcher := &SpecWatcher{
		commandFlags: commandFlags,
		notifier: notifier,
		interruptHandler: interruptHandler,
		suiteRunner: NewSuiteRunner(notifier, interruptHandler),
	}

	return &Command{
		Name: "watch",
		FlagSet: commandFlags.FlagSet,
		UsageCommand: "ginkgo watch <FLAGS> <PACKAGES> -- <PASS-THROUGHS>",
		Usage: []string{
			"Watches the tests in the passed in <PACKAGES> and runs them when changes occur.",
			"Any arguments after -- will be passed to the test.",
		},
		Command: watcher.WatchSpecs,
		SuppressFlagDocumentation: true,
		FlagDocSubstitute: []string{
			"Accepts all the flags that the ginkgo command accepts except for --keepGoing and --untilItFails",
		},
	}
}

type SpecWatcher struct {
	commandFlags *RunWatchAndBuildCommandFlags
	notifier *Notifier
	interruptHandler *interrupthandler.InterruptHandler
	suiteRunner *SuiteRunner
}

func (w *SpecWatcher) WatchSpecs(args []string, additionalArgs []string) {
	w.commandFlags.computeNodes()
	w.notifier.VerifyNotificationsAreAvailable()

	w.WatchSuites(args, additionalArgs)
}

func (w *SpecWatcher) runnersForSuites(suites []testsuite.TestSuite, additionalArgs []string) []*testrunner.TestRunner {
	runners := []*testrunner.TestRunner{}

	for _, suite := range suites {
		runners = append(runners, testrunner.New(suite, w.commandFlags.NumCPU, w.commandFlags.ParallelStream, w.commandFlags.GoOpts, additionalArgs))
	}

	return runners
}

func (w *SpecWatcher) WatchSuites(args []string, additionalArgs []string) {
	suites, _ := findSuites(args, w.commandFlags.Recurse, w.commandFlags.SkipPackage, false)

	if len(suites) == 0 {
		complainAndQuit("Found no test suites")
	}

	fmt.Printf("Identified %d test %s. Locating dependencies to a depth of %d (this may take a while)...\n", len(suites), pluralizedWord("suite", "suites", len(suites)), w.commandFlags.Depth)
	deltaTracker := watch.NewDeltaTracker(w.commandFlags.Depth)
	delta, errors := deltaTracker.Delta(suites)

	fmt.Printf("Watching %d %s:\n", len(delta.NewSuites), pluralizedWord("suite", "suites", len(delta.NewSuites)))
	for _, suite := range delta.NewSuites {
		fmt.Println("  " + suite.Description())
	}

	for suite, err := range errors {
		fmt.Printf("Failed to watch %s: %s\n", suite.PackageName, err)
	}

	if len(suites) == 1 {
		runners := w.runnersForSuites(suites, additionalArgs)
		w.suiteRunner.RunSuites(runners, w.commandFlags.NumCompilers, true, nil)
		runners[0].CleanUp()
	}

	ticker := time.NewTicker(time.Second)

	for {
		select {
		case <-ticker.C:
			suites, _ := findSuites(args, w.commandFlags.Recurse, w.commandFlags.SkipPackage, false)
			delta, _ := deltaTracker.Delta(suites)
			coloredStream := colorable.NewColorableStdout()

			suitesToRun := []testsuite.TestSuite{}

			if len(delta.NewSuites) > 0 {
				fmt.Fprintf(coloredStream, greenColor+"Detected %d new %s:\n"+defaultStyle, len(delta.NewSuites), pluralizedWord("suite", "suites", len(delta.NewSuites)))
				for _, suite := range delta.NewSuites {
					suitesToRun = append(suitesToRun, suite.Suite)
					fmt.Fprintln(coloredStream, "  "+suite.Description())
				}
			}

			modifiedSuites := delta.ModifiedSuites()
			if len(modifiedSuites) > 0 {
				fmt.Fprintln(coloredStream, greenColor+"\nDetected changes in:"+defaultStyle)
				for _, pkg := range delta.ModifiedPackages {
					fmt.Fprintln(coloredStream, "  "+pkg)
				}
				fmt.Fprintf(coloredStream, greenColor+"Will run %d %s:\n"+defaultStyle, len(modifiedSuites), pluralizedWord("suite", "suites", len(modifiedSuites)))
				for _, suite := range modifiedSuites {
					suitesToRun = append(suitesToRun, suite.Suite)
					fmt.Fprintln(coloredStream, "  "+suite.Description())
				}
				fmt.Fprintln(coloredStream, "")
			}

			if len(suitesToRun) > 0 {
				w.UpdateSeed()
				w.ComputeSuccinctMode(len(suitesToRun))
				runners := w.runnersForSuites(suitesToRun, additionalArgs)
				result, _ := w.suiteRunner.RunSuites(runners, w.commandFlags.NumCompilers, true, func(suite testsuite.TestSuite) {
					deltaTracker.WillRun(suite)
				})
				for _, runner := range runners {
					runner.CleanUp()
				}
				if !w.interruptHandler.WasInterrupted() {
					color := redColor
					if result.Passed {
						color = greenColor
					}
					fmt.Fprintln(coloredStream, color+"\nDone. Resuming watch..."+defaultStyle)
				}
			}

		case <-w.interruptHandler.C:
			return
		}
	}
}

func (w *SpecWatcher) ComputeSuccinctMode(numSuites int) {
	if config.DefaultReporterConfig.Verbose {
		config.DefaultReporterConfig.Succinct = false
		return
	}

	if w.commandFlags.wasSet("succinct") {
		return
	}

	if numSuites == 1 {
		config.DefaultReporterConfig.Succinct = false
	}

	if numSuites > 1 {
		config.DefaultReporterConfig.Succinct = true
	}
}

func (w *SpecWatcher) UpdateSeed() {
	if !w.commandFlags.wasSet("seed") {
		config.GinkgoConfig.RandomSeed = time.Now().Unix()
	}
}
569
vendor/github.com/onsi/ginkgo/ginkgo_dsl.go
generated
vendored
Normal file
@ -0,0 +1,569 @@
/*
Ginkgo is a BDD-style testing framework for Golang

The godoc documentation describes Ginkgo's API. More comprehensive documentation (with examples!) is available at http://onsi.github.io/ginkgo/

Ginkgo's preferred matcher library is [Gomega](http://github.com/onsi/gomega)

Ginkgo on Github: http://github.com/onsi/ginkgo

Ginkgo is MIT-Licensed
*/
package ginkgo

import (
	"flag"
	"fmt"
	"io"
	"net/http"
	"os"
	"strings"
	"time"

	"github.com/onsi/ginkgo/config"
	"github.com/onsi/ginkgo/internal/codelocation"
	"github.com/onsi/ginkgo/internal/failer"
	"github.com/onsi/ginkgo/internal/remote"
	"github.com/onsi/ginkgo/internal/suite"
	"github.com/onsi/ginkgo/internal/testingtproxy"
	"github.com/onsi/ginkgo/internal/writer"
	"github.com/onsi/ginkgo/reporters"
	"github.com/onsi/ginkgo/reporters/stenographer"
	"github.com/onsi/ginkgo/types"
)

const GINKGO_VERSION = config.VERSION
const GINKGO_PANIC = `
Your test failed.
Ginkgo panics to prevent subsequent assertions from running.
Normally Ginkgo rescues this panic so you shouldn't see it.

But, if you make an assertion in a goroutine, Ginkgo can't capture the panic.
To circumvent this, you should call

	defer GinkgoRecover()

at the top of the goroutine that caused this panic.
`
const defaultTimeout = 1

var globalSuite *suite.Suite
var globalFailer *failer.Failer

func init() {
	config.Flags(flag.CommandLine, "ginkgo", true)
	GinkgoWriter = writer.New(os.Stdout)
	globalFailer = failer.New()
	globalSuite = suite.New(globalFailer)
}

//GinkgoWriter implements an io.Writer
//When running in verbose mode any writes to GinkgoWriter will be immediately printed
//to stdout. Otherwise, GinkgoWriter will buffer any writes produced during the current test and flush them to screen
//only if the current test fails.
var GinkgoWriter io.Writer

//The interface by which Ginkgo receives *testing.T
type GinkgoTestingT interface {
	Fail()
}

//GinkgoRandomSeed returns the seed used to randomize spec execution order. It is
//useful for seeding your own pseudorandom number generators (PRNGs) to ensure
//consistent executions from run to run, where your tests contain variability (for
//example, when selecting random test data).
func GinkgoRandomSeed() int64 {
	return config.GinkgoConfig.RandomSeed
}

//GinkgoParallelNode returns the parallel node number for the current ginkgo process
//The node number is 1-indexed
func GinkgoParallelNode() int {
	return config.GinkgoConfig.ParallelNode
}

//Some matcher libraries or legacy codebases require a *testing.T
//GinkgoT implements an interface analogous to *testing.T and can be used if
//the library in question accepts *testing.T through an interface
//
// For example, with testify:
// assert.Equal(GinkgoT(), 123, 123, "they should be equal")
//
// Or with gomock:
// gomock.NewController(GinkgoT())
//
// GinkgoT() takes an optional offset argument that can be used to get the
// correct line number associated with the failure.
func GinkgoT(optionalOffset ...int) GinkgoTInterface {
	offset := 3
	if len(optionalOffset) > 0 {
		offset = optionalOffset[0]
	}
	return testingtproxy.New(GinkgoWriter, Fail, offset)
}

//The interface returned by GinkgoT(). This covers most of the methods
//in the testing package's T.
type GinkgoTInterface interface {
	Fail()
	Error(args ...interface{})
	Errorf(format string, args ...interface{})
	FailNow()
	Fatal(args ...interface{})
	Fatalf(format string, args ...interface{})
	Log(args ...interface{})
	Logf(format string, args ...interface{})
	Failed() bool
	Parallel()
	Skip(args ...interface{})
	Skipf(format string, args ...interface{})
	SkipNow()
	Skipped() bool
}

//Custom Ginkgo test reporters must implement the Reporter interface.
//
//The custom reporter is passed in a SuiteSummary when the suite begins and ends,
//and a SpecSummary just before a spec begins and just after a spec ends
type Reporter reporters.Reporter

//Asynchronous specs are given a channel of the Done type. You must close or write to the channel
//to tell Ginkgo that your async test is done.
type Done chan<- interface{}

//GinkgoTestDescription represents the information about the current running test returned by CurrentGinkgoTestDescription
//	FullTestText: a concatenation of ComponentTexts and the TestText
//	ComponentTexts: a list of all texts for the Describes & Contexts leading up to the current test
//	TestText: the text in the actual It or Measure node
//	IsMeasurement: true if the current test is a measurement
//	FileName: the name of the file containing the current test
//	LineNumber: the line number for the current test
//	Failed: if the current test has failed, this will be true (useful in an AfterEach)
type GinkgoTestDescription struct {
	FullTestText string
	ComponentTexts []string
	TestText string

	IsMeasurement bool

	FileName string
	LineNumber int

	Failed bool
}

//CurrentGinkgoTestDescripton returns information about the current running test.
func CurrentGinkgoTestDescription() GinkgoTestDescription {
	summary, ok := globalSuite.CurrentRunningSpecSummary()
	if !ok {
		return GinkgoTestDescription{}
	}

	subjectCodeLocation := summary.ComponentCodeLocations[len(summary.ComponentCodeLocations)-1]

	return GinkgoTestDescription{
		ComponentTexts: summary.ComponentTexts[1:],
		FullTestText: strings.Join(summary.ComponentTexts[1:], " "),
		TestText: summary.ComponentTexts[len(summary.ComponentTexts)-1],
		IsMeasurement: summary.IsMeasurement,
		FileName: subjectCodeLocation.FileName,
		LineNumber: subjectCodeLocation.LineNumber,
		Failed: summary.HasFailureState(),
	}
}

//Measurement tests receive a Benchmarker.
//
//You use the Time() function to time how long the passed in body function takes to run
//You use the RecordValue() function to track arbitrary numerical measurements.
//The RecordValueWithPrecision() function can be used alternatively to provide the unit
//and resolution of the numeric measurement.
//The optional info argument is passed to the test reporter and can be used to
// provide the measurement data to a custom reporter with context.
//
//See http://onsi.github.io/ginkgo/#benchmark_tests for more details
type Benchmarker interface {
	Time(name string, body func(), info ...interface{}) (elapsedTime time.Duration)
	RecordValue(name string, value float64, info ...interface{})
	RecordValueWithPrecision(name string, value float64, units string, precision int, info ...interface{})
}

//RunSpecs is the entry point for the Ginkgo test runner.
//You must call this within a Golang testing TestX(t *testing.T) function.
//
//To bootstrap a test suite you can use the Ginkgo CLI:
//
//	ginkgo bootstrap
func RunSpecs(t GinkgoTestingT, description string) bool {
	specReporters := []Reporter{buildDefaultReporter()}
	return RunSpecsWithCustomReporters(t, description, specReporters)
}

//To run your tests with Ginkgo's default reporter and your custom reporter(s), replace
//RunSpecs() with this method.
func RunSpecsWithDefaultAndCustomReporters(t GinkgoTestingT, description string, specReporters []Reporter) bool {
	specReporters = append([]Reporter{buildDefaultReporter()}, specReporters...)
	return RunSpecsWithCustomReporters(t, description, specReporters)
}

//To run your tests with your custom reporter(s) (and *not* Ginkgo's default reporter), replace
//RunSpecs() with this method. Note that parallel tests will not work correctly without the default reporter
func RunSpecsWithCustomReporters(t GinkgoTestingT, description string, specReporters []Reporter) bool {
	writer := GinkgoWriter.(*writer.Writer)
	writer.SetStream(config.DefaultReporterConfig.Verbose)
	reporters := make([]reporters.Reporter, len(specReporters))
	for i, reporter := range specReporters {
		reporters[i] = reporter
	}
	passed, hasFocusedTests := globalSuite.Run(t, description, reporters, writer, config.GinkgoConfig)
	if passed && hasFocusedTests {
		fmt.Println("PASS | FOCUSED")
		os.Exit(types.GINKGO_FOCUS_EXIT_CODE)
	}
	return passed
}

func buildDefaultReporter() Reporter {
	remoteReportingServer := config.GinkgoConfig.StreamHost
	if remoteReportingServer == "" {
		stenographer := stenographer.New(!config.DefaultReporterConfig.NoColor, config.GinkgoConfig.FlakeAttempts > 1)
		return reporters.NewDefaultReporter(config.DefaultReporterConfig, stenographer)
	} else {
		return remote.NewForwardingReporter(remoteReportingServer, &http.Client{}, remote.NewOutputInterceptor())
	}
}

//Skip notifies Ginkgo that the current spec should be skipped.
func Skip(message string, callerSkip ...int) {
	skip := 0
	if len(callerSkip) > 0 {
		skip = callerSkip[0]
	}

	globalFailer.Skip(message, codelocation.New(skip+1))
	panic(GINKGO_PANIC)
}

//Fail notifies Ginkgo that the current spec has failed. (Gomega will call Fail for you automatically when an assertion fails.)
func Fail(message string, callerSkip ...int) {
	skip := 0
	if len(callerSkip) > 0 {
		skip = callerSkip[0]
	}

	globalFailer.Fail(message, codelocation.New(skip+1))
	panic(GINKGO_PANIC)
}

//GinkgoRecover should be deferred at the top of any spawned goroutine that (may) call `Fail`
//Since Gomega assertions call fail, you should throw a `defer GinkgoRecover()` at the top of any goroutine that
//calls out to Gomega
//
//Here's why: Ginkgo's `Fail` method records the failure and then panics to prevent
//further assertions from running. This panic must be recovered. Ginkgo does this for you
//if the panic originates in a Ginkgo node (an It, BeforeEach, etc...)
//
//Unfortunately, if a panic originates on a goroutine *launched* from one of these nodes there's no
//way for Ginkgo to rescue the panic. To do this, you must remember to `defer GinkgoRecover()` at the top of such a goroutine.
func GinkgoRecover() {
	e := recover()
	if e != nil {
		globalFailer.Panic(codelocation.New(1), e)
	}
}

//Describe blocks allow you to organize your specs. A Describe block can contain any number of
//BeforeEach, AfterEach, JustBeforeEach, It, and Measurement blocks.
//
//In addition you can nest Describe and Context blocks. Describe and Context blocks are functionally
//equivalent. The difference is purely semantic -- you typical Describe the behavior of an object
//or method and, within that Describe, outline a number of Contexts.
func Describe(text string, body func()) bool {
	globalSuite.PushContainerNode(text, body, types.FlagTypeNone, codelocation.New(1))
	return true
}

//You can focus the tests within a describe block using FDescribe
func FDescribe(text string, body func()) bool {
	globalSuite.PushContainerNode(text, body, types.FlagTypeFocused, codelocation.New(1))
	return true
}

//You can mark the tests within a describe block as pending using PDescribe
func PDescribe(text string, body func()) bool {
	globalSuite.PushContainerNode(text, body, types.FlagTypePending, codelocation.New(1))
	return true
}

//You can mark the tests within a describe block as pending using XDescribe
func XDescribe(text string, body func()) bool {
	globalSuite.PushContainerNode(text, body, types.FlagTypePending, codelocation.New(1))
	return true
}

//Context blocks allow you to organize your specs. A Context block can contain any number of
//BeforeEach, AfterEach, JustBeforeEach, It, and Measurement blocks.
//
//In addition you can nest Describe and Context blocks. Describe and Context blocks are functionally
//equivalent. The difference is purely semantic -- you typical Describe the behavior of an object
//or method and, within that Describe, outline a number of Contexts.
func Context(text string, body func()) bool {
	globalSuite.PushContainerNode(text, body, types.FlagTypeNone, codelocation.New(1))
	return true
}

//You can focus the tests within a describe block using FContext
func FContext(text string, body func()) bool {
	globalSuite.PushContainerNode(text, body, types.FlagTypeFocused, codelocation.New(1))
	return true
}

//You can mark the tests within a describe block as pending using PContext
func PContext(text string, body func()) bool {
	globalSuite.PushContainerNode(text, body, types.FlagTypePending, codelocation.New(1))
	return true
}

//You can mark the tests within a describe block as pending using XContext
func XContext(text string, body func()) bool {
	globalSuite.PushContainerNode(text, body, types.FlagTypePending, codelocation.New(1))
	return true
}

//It blocks contain your test code and assertions. You cannot nest any other Ginkgo blocks
//within an It block.
//
//Ginkgo will normally run It blocks synchronously. To perform asynchronous tests, pass a
//function that accepts a Done channel. When you do this, you can also provide an optional timeout.
func It(text string, body interface{}, timeout ...float64) bool {
	globalSuite.PushItNode(text, body, types.FlagTypeNone, codelocation.New(1), parseTimeout(timeout...))
	return true
}

//You can focus individual Its using FIt
func FIt(text string, body interface{}, timeout ...float64) bool {
	globalSuite.PushItNode(text, body, types.FlagTypeFocused, codelocation.New(1), parseTimeout(timeout...))
	return true
}

//You can mark Its as pending using PIt
func PIt(text string, _ ...interface{}) bool {
	globalSuite.PushItNode(text, func() {}, types.FlagTypePending, codelocation.New(1), 0)
	return true
}

//You can mark Its as pending using XIt
func XIt(text string, _ ...interface{}) bool {
	globalSuite.PushItNode(text, func() {}, types.FlagTypePending, codelocation.New(1), 0)
	return true
}

//Specify blocks are aliases for It blocks and allow for more natural wording in situations
//which "It" does not fit into a natural sentence flow. All the same protocols apply for Specify blocks
//which apply to It blocks.
func Specify(text string, body interface{}, timeout ...float64) bool {
	return It(text, body, timeout...)
}

//You can focus individual Specifys using FSpecify
func FSpecify(text string, body interface{}, timeout ...float64) bool {
	return FIt(text, body, timeout...)
}

//You can mark Specifys as pending using PSpecify
func PSpecify(text string, is ...interface{}) bool {
	return PIt(text, is...)
}

//You can mark Specifys as pending using XSpecify
func XSpecify(text string, is ...interface{}) bool {
	return XIt(text, is...)
}

//By allows you to better document large Its.
//
//Generally you should try to keep your Its short and to the point. This is not always possible, however,
//especially in the context of integration tests that capture a particular workflow.
//
//By allows you to document such flows. By must be called within a runnable node (It, BeforeEach, Measure, etc...)
//By will simply log the passed in text to the GinkgoWriter. If By is handed a function it will immediately run the function.
func By(text string, callbacks ...func()) {
	preamble := "\x1b[1mSTEP\x1b[0m"
	if config.DefaultReporterConfig.NoColor {
		preamble = "STEP"
	}
	fmt.Fprintln(GinkgoWriter, preamble+": "+text)
	if len(callbacks) == 1 {
		callbacks[0]()
	}
	if len(callbacks) > 1 {
		panic("just one callback per By, please")
	}
}

//Measure blocks run the passed in body function repeatedly (determined by the samples argument)
//and accumulate metrics provided to the Benchmarker by the body function.
//
//The body function must have the signature:
//	func(b Benchmarker)
func Measure(text string, body interface{}, samples int) bool {
	globalSuite.PushMeasureNode(text, body, types.FlagTypeNone, codelocation.New(1), samples)
	return true
}

//You can focus individual Measures using FMeasure
func FMeasure(text string, body interface{}, samples int) bool {
	globalSuite.PushMeasureNode(text, body, types.FlagTypeFocused, codelocation.New(1), samples)
	return true
}

//You can mark Maeasurements as pending using PMeasure
func PMeasure(text string, _ ...interface{}) bool {
	globalSuite.PushMeasureNode(text, func(b Benchmarker) {}, types.FlagTypePending, codelocation.New(1), 0)
	return true
}

//You can mark Maeasurements as pending using XMeasure
func XMeasure(text string, _ ...interface{}) bool {
	globalSuite.PushMeasureNode(text, func(b Benchmarker) {}, types.FlagTypePending, codelocation.New(1), 0)
	return true
}

//BeforeSuite blocks are run just once before any specs are run. When running in parallel, each
//parallel node process will call BeforeSuite.
//
//BeforeSuite blocks can be made asynchronous by providing a body function that accepts a Done channel
//
//You may only register *one* BeforeSuite handler per test suite. You typically do so in your bootstrap file at the top level.
func BeforeSuite(body interface{}, timeout ...float64) bool {
	globalSuite.SetBeforeSuiteNode(body, codelocation.New(1), parseTimeout(timeout...))
	return true
}

//AfterSuite blocks are *always* run after all the specs regardless of whether specs have passed or failed.
//Moreover, if Ginkgo receives an interrupt signal (^C) it will attempt to run the AfterSuite before exiting.
//
//When running in parallel, each parallel node process will call AfterSuite.
//
//AfterSuite blocks can be made asynchronous by providing a body function that accepts a Done channel
//
//You may only register *one* AfterSuite handler per test suite. You typically do so in your bootstrap file at the top level.
func AfterSuite(body interface{}, timeout ...float64) bool {
	globalSuite.SetAfterSuiteNode(body, codelocation.New(1), parseTimeout(timeout...))
	return true
}

//SynchronizedBeforeSuite blocks are primarily meant to solve the problem of setting up singleton external resources shared across
//nodes when running tests in parallel. For example, say you have a shared database that you can only start one instance of that
//must be used in your tests. When running in parallel, only one node should set up the database and all other nodes should wait
//until that node is done before running.
//
//SynchronizedBeforeSuite accomplishes this by taking *two* function arguments. The first is only run on parallel node #1. The second is
//run on all nodes, but *only* after the first function completes succesfully. Ginkgo also makes it possible to send data from the first function (on Node 1)
//to the second function (on all the other nodes).
//
//The functions have the following signatures. The first function (which only runs on node 1) has the signature:
//
//	func() []byte
//
//or, to run asynchronously:
//
//	func(done Done) []byte
//
//The byte array returned by the first function is then passed to the second function, which has the signature:
//
//	func(data []byte)
|
||||||
|
//
|
||||||
|
//or, to run asynchronously:
|
||||||
|
//
|
||||||
|
// func(data []byte, done Done)
|
||||||
|
//
|
||||||
|
//Here's a simple pseudo-code example that starts a shared database on Node 1 and shares the database's address with the other nodes:
|
||||||
|
//
|
||||||
|
// var dbClient db.Client
|
||||||
|
// var dbRunner db.Runner
|
||||||
|
//
|
||||||
|
// var _ = SynchronizedBeforeSuite(func() []byte {
|
||||||
|
// dbRunner = db.NewRunner()
|
||||||
|
// err := dbRunner.Start()
|
||||||
|
// Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
// return []byte(dbRunner.URL)
|
||||||
|
// }, func(data []byte) {
|
||||||
|
// dbClient = db.NewClient()
|
||||||
|
// err := dbClient.Connect(string(data))
|
||||||
|
// Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
// })
|
||||||
|
func SynchronizedBeforeSuite(node1Body interface{}, allNodesBody interface{}, timeout ...float64) bool {
|
||||||
|
globalSuite.SetSynchronizedBeforeSuiteNode(
|
||||||
|
node1Body,
|
||||||
|
allNodesBody,
|
||||||
|
codelocation.New(1),
|
||||||
|
parseTimeout(timeout...),
|
||||||
|
)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
//SynchronizedAfterSuite blocks complement the SynchronizedBeforeSuite blocks in solving the problem of setting up
|
||||||
|
//external singleton resources shared across nodes when running tests in parallel.
|
||||||
|
//
|
||||||
|
//SynchronizedAfterSuite accomplishes this by taking *two* function arguments. The first runs on all nodes. The second runs only on parallel node #1
|
||||||
|
//and *only* after all other nodes have finished and exited. This ensures that node 1, and any resources it is running, remain alive until
|
||||||
|
//all other nodes are finished.
|
||||||
|
//
|
||||||
|
//Both functions have the same signature: either func() or func(done Done) to run asynchronously.
|
||||||
|
//
|
||||||
|
//Here's a pseudo-code example that complements that given in SynchronizedBeforeSuite. Here, SynchronizedAfterSuite is used to tear down the shared database
|
||||||
|
//only after all nodes have finished:
|
||||||
|
//
|
||||||
|
// var _ = SynchronizedAfterSuite(func() {
|
||||||
|
// dbClient.Cleanup()
|
||||||
|
// }, func() {
|
||||||
|
// dbRunner.Stop()
|
||||||
|
// })
|
||||||
|
func SynchronizedAfterSuite(allNodesBody interface{}, node1Body interface{}, timeout ...float64) bool {
|
||||||
|
globalSuite.SetSynchronizedAfterSuiteNode(
|
||||||
|
allNodesBody,
|
||||||
|
node1Body,
|
||||||
|
codelocation.New(1),
|
||||||
|
parseTimeout(timeout...),
|
||||||
|
)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
//BeforeEach blocks are run before It blocks. When multiple BeforeEach blocks are defined in nested
|
||||||
|
//Describe and Context blocks the outermost BeforeEach blocks are run first.
|
||||||
|
//
|
||||||
|
//Like It blocks, BeforeEach blocks can be made asynchronous by providing a body function that accepts
|
||||||
|
//a Done channel
|
||||||
|
func BeforeEach(body interface{}, timeout ...float64) bool {
|
||||||
|
globalSuite.PushBeforeEachNode(body, codelocation.New(1), parseTimeout(timeout...))
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
//JustBeforeEach blocks are run before It blocks but *after* all BeforeEach blocks. For more details,
|
||||||
|
//read the [documentation](http://onsi.github.io/ginkgo/#separating_creation_and_configuration_)
|
||||||
|
//
|
||||||
|
//Like It blocks, BeforeEach blocks can be made asynchronous by providing a body function that accepts
|
||||||
|
//a Done channel
|
||||||
|
func JustBeforeEach(body interface{}, timeout ...float64) bool {
|
||||||
|
globalSuite.PushJustBeforeEachNode(body, codelocation.New(1), parseTimeout(timeout...))
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
//AfterEach blocks are run after It blocks. When multiple AfterEach blocks are defined in nested
|
||||||
|
//Describe and Context blocks the innermost AfterEach blocks are run first.
|
||||||
|
//
|
||||||
|
//Like It blocks, AfterEach blocks can be made asynchronous by providing a body function that accepts
|
||||||
|
//a Done channel
|
||||||
|
func AfterEach(body interface{}, timeout ...float64) bool {
|
||||||
|
globalSuite.PushAfterEachNode(body, codelocation.New(1), parseTimeout(timeout...))
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
func parseTimeout(timeout ...float64) time.Duration {
|
||||||
|
if len(timeout) == 0 {
|
||||||
|
return time.Duration(defaultTimeout * int64(time.Second))
|
||||||
|
} else {
|
||||||
|
return time.Duration(timeout[0] * float64(time.Second))
|
||||||
|
}
|
||||||
|
}
|
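The DSL documented above is easiest to see end to end in a small spec. The sketch below is a hypothetical, minimal example (the Cache subject and its keys are invented, and the usual `RunSpecs` bootstrap file is omitted) showing suite-level setup, nested per-spec setup, `By` steps, an asynchronous body, a pending spec, and a `Measure` block.

```golang
package cache_test

import (
	"time"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
)

var _ = BeforeSuite(func() {
	// Runs once per process before any spec; see BeforeSuite above.
})

var _ = Describe("Cache", func() {
	var store map[string]string

	BeforeEach(func() {
		// Outermost BeforeEach blocks run first.
		store = map[string]string{}
	})

	JustBeforeEach(func() {
		// Runs after all BeforeEach blocks, right before the It.
		store["seeded"] = "true"
	})

	It("stores and retrieves values", func() {
		By("writing a key")
		store["greeting"] = "hello"

		By("reading it back")
		Ω(store["greeting"]).Should(Equal("hello"))
	})

	It("supports an asynchronous body", func(done Done) {
		// Accepting a Done channel makes the spec asynchronous; the trailing
		// float64 is the timeout in seconds (see parseTimeout above).
		go func() {
			defer GinkgoRecover()
			Ω(store).ShouldNot(BeNil())
			close(done)
		}()
	}, 2.0)

	PIt("is pending and will not run")

	Measure("writes are quick", func(b Benchmarker) {
		elapsed := b.Time("write", func() {
			store["k"] = "v"
		})
		Ω(elapsed).Should(BeNumerically("<", time.Second))
	}, 3)
})
```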
121
vendor/github.com/onsi/ginkgo/integration/convert_test.go
generated
vendored
Normal file
@ -0,0 +1,121 @@
package integration_test

import (
	"io/ioutil"
	"os"
	"os/exec"
	"path/filepath"
	"strings"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
)

var _ = Describe("ginkgo convert", func() {
	var tmpDir string

	readConvertedFileNamed := func(pathComponents ...string) string {
		pathToFile := filepath.Join(tmpDir, "convert_fixtures", filepath.Join(pathComponents...))
		bytes, err := ioutil.ReadFile(pathToFile)
		ExpectWithOffset(1, err).NotTo(HaveOccurred())

		return string(bytes)
	}

	readGoldMasterNamed := func(filename string) string {
		bytes, err := ioutil.ReadFile(filepath.Join("_fixtures", "convert_goldmasters", filename))
		Ω(err).ShouldNot(HaveOccurred())

		return string(bytes)
	}

	BeforeEach(func() {
		var err error

		tmpDir, err = ioutil.TempDir("", "ginkgo-convert")
		Ω(err).ShouldNot(HaveOccurred())

		err = exec.Command("cp", "-r", filepath.Join("_fixtures", "convert_fixtures"), tmpDir).Run()
		Ω(err).ShouldNot(HaveOccurred())
	})

	JustBeforeEach(func() {
		cwd, err := os.Getwd()
		Ω(err).ShouldNot(HaveOccurred())

		relPath, err := filepath.Rel(cwd, filepath.Join(tmpDir, "convert_fixtures"))
		Ω(err).ShouldNot(HaveOccurred())

		cmd := exec.Command(pathToGinkgo, "convert", relPath)
		cmd.Env = os.Environ()
		for i, env := range cmd.Env {
			if strings.HasPrefix(env, "PATH") {
				cmd.Env[i] = cmd.Env[i] + ":" + filepath.Dir(pathToGinkgo)
				break
			}
		}
		err = cmd.Run()
		Ω(err).ShouldNot(HaveOccurred())
	})

	AfterEach(func() {
		err := os.RemoveAll(tmpDir)
		Ω(err).ShouldNot(HaveOccurred())
	})

	It("rewrites xunit tests as ginkgo tests", func() {
		convertedFile := readConvertedFileNamed("xunit_test.go")
		goldMaster := readGoldMasterNamed("xunit_test.go")
		Ω(convertedFile).Should(Equal(goldMaster))
	})

	It("rewrites all usages of *testing.T as mr.T()", func() {
		convertedFile := readConvertedFileNamed("extra_functions_test.go")
		goldMaster := readGoldMasterNamed("extra_functions_test.go")
		Ω(convertedFile).Should(Equal(goldMaster))
	})

	It("rewrites tests in the package dir that belong to other packages", func() {
		convertedFile := readConvertedFileNamed("outside_package_test.go")
		goldMaster := readGoldMasterNamed("outside_package_test.go")
		Ω(convertedFile).Should(Equal(goldMaster))
	})

	It("rewrites tests in nested packages", func() {
		convertedFile := readConvertedFileNamed("nested", "nested_test.go")
		goldMaster := readGoldMasterNamed("nested_test.go")
		Ω(convertedFile).Should(Equal(goldMaster))
	})

	Context("ginkgo test suite files", func() {
		It("creates a ginkgo test suite file for the package you specified", func() {
			testsuite := readConvertedFileNamed("convert_fixtures_suite_test.go")
			goldMaster := readGoldMasterNamed("suite_test.go")
			Ω(testsuite).Should(Equal(goldMaster))
		})

		It("converts go tests in deeply nested packages (some may not contain go files)", func() {
			testsuite := readConvertedFileNamed("nested_without_gofiles", "subpackage", "nested_subpackage_test.go")
			goldMaster := readGoldMasterNamed("nested_subpackage_test.go")
			Ω(testsuite).Should(Equal(goldMaster))
		})

		It("creates ginkgo test suites for all nested packages", func() {
			testsuite := readConvertedFileNamed("nested", "nested_suite_test.go")
			goldMaster := readGoldMasterNamed("nested_suite_test.go")
			Ω(testsuite).Should(Equal(goldMaster))
		})
	})

	Context("with an existing test suite file", func() {
		BeforeEach(func() {
			goldMaster := readGoldMasterNamed("fixtures_suite_test.go")
			err := ioutil.WriteFile(filepath.Join(tmpDir, "convert_fixtures", "tmp_suite_test.go"), []byte(goldMaster), 0600)
			Ω(err).ShouldNot(HaveOccurred())
		})

		It("gracefully handles existing test suite files", func() {
			//nothing should have gone wrong!
		})
	})
})
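For orientation, these specs drive `ginkgo convert`, which rewrites plain `testing` ("xunit style") tests into Ginkgo specs and generates a suite bootstrap file. The block below is a rough, hand-written illustration of that before/after shape under invented names; it is not the converter's literal output or the gold-master content.

```golang
package books_test

import (
	"strings"
	"testing"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
)

// An xunit-style test like this...
func TestUpcasesTitles(t *testing.T) {
	if strings.ToUpper("war and peace") != "WAR AND PEACE" {
		t.Error("title was not upcased")
	}
}

// ...becomes, roughly, an It inside a Describe after conversion, with a
// generated books_suite_test.go providing the RunSpecs entry point.
var _ = Describe("upcasing titles", func() {
	It("upcases titles", func() {
		Ω(strings.ToUpper("war and peace")).Should(Equal("WAR AND PEACE"))
	})
})
```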
54
vendor/github.com/onsi/ginkgo/integration/coverage_test.go
generated
vendored
Normal file
@ -0,0 +1,54 @@
package integration_test

import (
	"os"
	"os/exec"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
	"github.com/onsi/gomega/gexec"
)

var _ = Describe("Coverage Specs", func() {
	AfterEach(func() {
		os.RemoveAll("./_fixtures/coverage_fixture/coverage_fixture.coverprofile")
	})

	It("runs coverage analysis in series and in parallel", func() {
		session := startGinkgo("./_fixtures/coverage_fixture", "-cover")
		Eventually(session).Should(gexec.Exit(0))
		output := session.Out.Contents()
		Ω(output).Should(ContainSubstring("coverage: 80.0% of statements"))

		serialCoverProfileOutput, err := exec.Command("go", "tool", "cover", "-func=./_fixtures/coverage_fixture/coverage_fixture.coverprofile").CombinedOutput()
		Ω(err).ShouldNot(HaveOccurred())

		os.RemoveAll("./_fixtures/coverage_fixture/coverage_fixture.coverprofile")

		Eventually(startGinkgo("./_fixtures/coverage_fixture", "-cover", "-nodes=4")).Should(gexec.Exit(0))

		parallelCoverProfileOutput, err := exec.Command("go", "tool", "cover", "-func=./_fixtures/coverage_fixture/coverage_fixture.coverprofile").CombinedOutput()
		Ω(err).ShouldNot(HaveOccurred())

		Ω(parallelCoverProfileOutput).Should(Equal(serialCoverProfileOutput))
	})

	It("runs coverage analysis on external packages in series and in parallel", func() {
		session := startGinkgo("./_fixtures/coverage_fixture", "-coverpkg=github.com/onsi/ginkgo/integration/_fixtures/coverage_fixture,github.com/onsi/ginkgo/integration/_fixtures/coverage_fixture/external_coverage_fixture")
		Eventually(session).Should(gexec.Exit(0))
		output := session.Out.Contents()
		Ω(output).Should(ContainSubstring("coverage: 71.4% of statements in github.com/onsi/ginkgo/integration/_fixtures/coverage_fixture, github.com/onsi/ginkgo/integration/_fixtures/coverage_fixture/external_coverage_fixture"))

		serialCoverProfileOutput, err := exec.Command("go", "tool", "cover", "-func=./_fixtures/coverage_fixture/coverage_fixture.coverprofile").CombinedOutput()
		Ω(err).ShouldNot(HaveOccurred())

		os.RemoveAll("./_fixtures/coverage_fixture/coverage_fixture.coverprofile")

		Eventually(startGinkgo("./_fixtures/coverage_fixture", "-coverpkg=github.com/onsi/ginkgo/integration/_fixtures/coverage_fixture,github.com/onsi/ginkgo/integration/_fixtures/coverage_fixture/external_coverage_fixture", "-nodes=4")).Should(gexec.Exit(0))

		parallelCoverProfileOutput, err := exec.Command("go", "tool", "cover", "-func=./_fixtures/coverage_fixture/coverage_fixture.coverprofile").CombinedOutput()
		Ω(err).ShouldNot(HaveOccurred())

		Ω(parallelCoverProfileOutput).Should(Equal(serialCoverProfileOutput))
	})
})
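The specs above shell out to `go tool cover -func=<profile>` and require the serial and parallel profiles to summarize identically. For reference, the last line of that command's output carries the total percentage; a minimal sketch of extracting it (assuming the standard `-func` output format, with an invented profile name):

```golang
package main

import (
	"fmt"
	"os/exec"
	"strings"
)

// totalCoverage runs `go tool cover -func` on a coverprofile and returns the
// percentage from the final "total:" line, e.g. "80.0%".
func totalCoverage(profile string) (string, error) {
	out, err := exec.Command("go", "tool", "cover", "-func="+profile).CombinedOutput()
	if err != nil {
		return "", err
	}
	lines := strings.Split(strings.TrimSpace(string(out)), "\n")
	last := lines[len(lines)-1] // "total:  (statements)  80.0%"
	fields := strings.Fields(last)
	return fields[len(fields)-1], nil
}

func main() {
	pct, err := totalCoverage("coverage_fixture.coverprofile")
	if err != nil {
		fmt.Println("cover failed:", err)
		return
	}
	fmt.Println("total coverage:", pct)
}
```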
48
vendor/github.com/onsi/ginkgo/integration/fail_test.go
generated
vendored
Normal file
@ -0,0 +1,48 @@
package integration_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
	"github.com/onsi/gomega/gexec"
)

var _ = Describe("Failing Specs", func() {
	var pathToTest string

	BeforeEach(func() {
		pathToTest = tmpPath("failing")
		copyIn("fail_fixture", pathToTest)
	})

	It("should fail in all the possible ways", func() {
		session := startGinkgo(pathToTest, "--noColor")
		Eventually(session).Should(gexec.Exit(1))
		output := string(session.Out.Contents())

		Ω(output).ShouldNot(ContainSubstring("NEVER SEE THIS"))

		Ω(output).Should(ContainSubstring("a top level failure on line 9"))
		Ω(output).Should(ContainSubstring("fail_fixture_test.go:9"))
		Ω(output).Should(ContainSubstring("an async top level failure on line 14"))
		Ω(output).Should(ContainSubstring("fail_fixture_test.go:14"))
		Ω(output).Should(ContainSubstring("a top level goroutine failure on line 21"))
		Ω(output).Should(ContainSubstring("fail_fixture_test.go:21"))

		Ω(output).Should(ContainSubstring("a sync failure"))
		Ω(output).Should(MatchRegexp(`Test Panicked\n\s+a sync panic`))
		Ω(output).Should(ContainSubstring("a sync FAIL failure"))
		Ω(output).Should(ContainSubstring("async timeout [It]"))
		Ω(output).Should(ContainSubstring("Timed out"))
		Ω(output).Should(ContainSubstring("an async failure"))
		Ω(output).Should(MatchRegexp(`Test Panicked\n\s+an async panic`))
		Ω(output).Should(ContainSubstring("an async FAIL failure"))
		Ω(output).Should(ContainSubstring("a goroutine FAIL failure"))
		Ω(output).Should(ContainSubstring("a goroutine failure"))
		Ω(output).Should(MatchRegexp(`Test Panicked\n\s+a goroutine panic`))
		Ω(output).Should(ContainSubstring("a measure failure"))
		Ω(output).Should(ContainSubstring("a measure FAIL failure"))
		Ω(output).Should(MatchRegexp(`Test Panicked\n\s+a measure panic`))

		Ω(output).Should(ContainSubstring("0 Passed | 16 Failed"))
	})
})
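Several of the failure modes exercised here come from assertions made off the main spec goroutine. Because a failed Gomega assertion panics (via the registered Fail handler), a spawned goroutine needs `defer GinkgoRecover()` for the failure to be reported against the spec instead of crashing the process; a minimal sketch:

```golang
package demo_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
)

var _ = Describe("failing from a goroutine", func() {
	It("reports the failure instead of crashing", func(done Done) {
		go func() {
			// Without GinkgoRecover, a failed assertion here would panic the
			// whole test process rather than fail this spec.
			defer GinkgoRecover()

			Ω(2 + 2).Should(Equal(4))
			close(done)
		}()
	}, 1.0)
})
```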
231
vendor/github.com/onsi/ginkgo/integration/flags_test.go
generated
vendored
Normal file
@ -0,0 +1,231 @@
package integration_test

import (
	"io/ioutil"
	"os"
	"path/filepath"
	"strings"

	. "github.com/onsi/ginkgo"
	"github.com/onsi/ginkgo/types"
	. "github.com/onsi/gomega"
	"github.com/onsi/gomega/gexec"
)

var _ = Describe("Flags Specs", func() {
	var pathToTest string

	BeforeEach(func() {
		pathToTest = tmpPath("flags")
		copyIn("flags_tests", pathToTest)
	})

	getRandomOrders := func(output string) []int {
		return []int{strings.Index(output, "RANDOM_A"), strings.Index(output, "RANDOM_B"), strings.Index(output, "RANDOM_C")}
	}

	It("normally passes, runs measurements, prints out noisy pendings, does not randomize tests, and honors the programmatic focus", func() {
		session := startGinkgo(pathToTest, "--noColor")
		Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE))
		output := string(session.Out.Contents())

		Ω(output).Should(ContainSubstring("Ran 3 samples:"), "has a measurement")
		Ω(output).Should(ContainSubstring("11 Passed"))
		Ω(output).Should(ContainSubstring("0 Failed"))
		Ω(output).Should(ContainSubstring("1 Pending"))
		Ω(output).Should(ContainSubstring("3 Skipped"))
		Ω(output).Should(ContainSubstring("[PENDING]"))
		Ω(output).Should(ContainSubstring("marshmallow"))
		Ω(output).Should(ContainSubstring("chocolate"))
		Ω(output).Should(ContainSubstring("CUSTOM_FLAG: default"))
		Ω(output).Should(ContainSubstring("Detected Programmatic Focus - setting exit status to %d", types.GINKGO_FOCUS_EXIT_CODE))
		Ω(output).ShouldNot(ContainSubstring("smores"))
		Ω(output).ShouldNot(ContainSubstring("SLOW TEST"))
		Ω(output).ShouldNot(ContainSubstring("should honor -slowSpecThreshold"))

		orders := getRandomOrders(output)
		Ω(orders[0]).Should(BeNumerically("<", orders[1]))
		Ω(orders[1]).Should(BeNumerically("<", orders[2]))
	})

	It("should run a coverprofile when passed -cover", func() {
		session := startGinkgo(pathToTest, "--noColor", "--cover", "--focus=the focused set")
		Eventually(session).Should(gexec.Exit(0))
		output := string(session.Out.Contents())

		_, err := os.Stat(filepath.Join(pathToTest, "flags.coverprofile"))
		Ω(err).ShouldNot(HaveOccurred())
		Ω(output).Should(ContainSubstring("coverage: "))
	})

	It("should fail when there are pending tests and it is passed --failOnPending", func() {
		session := startGinkgo(pathToTest, "--noColor", "--failOnPending")
		Eventually(session).Should(gexec.Exit(1))
	})

	It("should not print out pendings when --noisyPendings=false", func() {
		session := startGinkgo(pathToTest, "--noColor", "--noisyPendings=false")
		Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE))
		output := string(session.Out.Contents())

		Ω(output).ShouldNot(ContainSubstring("[PENDING]"))
		Ω(output).Should(ContainSubstring("1 Pending"))
	})

	It("should override the programmatic focus when told to focus", func() {
		session := startGinkgo(pathToTest, "--noColor", "--focus=smores")
		Eventually(session).Should(gexec.Exit(0))
		output := string(session.Out.Contents())

		Ω(output).Should(ContainSubstring("marshmallow"))
		Ω(output).Should(ContainSubstring("chocolate"))
		Ω(output).Should(ContainSubstring("smores"))
		Ω(output).Should(ContainSubstring("3 Passed"))
		Ω(output).Should(ContainSubstring("0 Failed"))
		Ω(output).Should(ContainSubstring("0 Pending"))
		Ω(output).Should(ContainSubstring("12 Skipped"))
	})

	It("should override the programmatic focus when told to skip", func() {
		session := startGinkgo(pathToTest, "--noColor", "--skip=marshmallow|failing|flaky")
		Eventually(session).Should(gexec.Exit(0))
		output := string(session.Out.Contents())

		Ω(output).ShouldNot(ContainSubstring("marshmallow"))
		Ω(output).Should(ContainSubstring("chocolate"))
		Ω(output).Should(ContainSubstring("smores"))
		Ω(output).Should(ContainSubstring("11 Passed"))
		Ω(output).Should(ContainSubstring("0 Failed"))
		Ω(output).Should(ContainSubstring("1 Pending"))
		Ω(output).Should(ContainSubstring("3 Skipped"))
	})

	It("should run the race detector when told to", func() {
		session := startGinkgo(pathToTest, "--noColor", "--race")
		Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE))
		output := string(session.Out.Contents())

		Ω(output).Should(ContainSubstring("WARNING: DATA RACE"))
	})

	It("should randomize tests when told to", func() {
		session := startGinkgo(pathToTest, "--noColor", "--randomizeAllSpecs", "--seed=17")
		Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE))
		output := string(session.Out.Contents())

		orders := getRandomOrders(output)
		Ω(orders[0]).ShouldNot(BeNumerically("<", orders[1]))
	})

	It("should skip measurements when told to", func() {
		session := startGinkgo(pathToTest, "--skipMeasurements")
		Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE))
		output := string(session.Out.Contents())

		Ω(output).ShouldNot(ContainSubstring("Ran 3 samples:"), "has a measurement")
		Ω(output).Should(ContainSubstring("4 Skipped"))
	})

	It("should watch for slow specs", func() {
		session := startGinkgo(pathToTest, "--slowSpecThreshold=0.05")
		Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE))
		output := string(session.Out.Contents())

		Ω(output).Should(ContainSubstring("SLOW TEST"))
		Ω(output).Should(ContainSubstring("should honor -slowSpecThreshold"))
	})

	It("should pass additional arguments in", func() {
		session := startGinkgo(pathToTest, "--", "--customFlag=madagascar")
		Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE))
		output := string(session.Out.Contents())

		Ω(output).Should(ContainSubstring("CUSTOM_FLAG: madagascar"))
	})

	It("should print out full stack traces for failures when told to", func() {
		session := startGinkgo(pathToTest, "--focus=a failing test", "--trace")
		Eventually(session).Should(gexec.Exit(1))
		output := string(session.Out.Contents())

		Ω(output).Should(ContainSubstring("Full Stack Trace"))
	})

	It("should fail fast when told to", func() {
		pathToTest = tmpPath("fail")
		copyIn("fail_fixture", pathToTest)
		session := startGinkgo(pathToTest, "--failFast")
		Eventually(session).Should(gexec.Exit(1))
		output := string(session.Out.Contents())

		Ω(output).Should(ContainSubstring("1 Failed"))
		Ω(output).Should(ContainSubstring("15 Skipped"))
	})

	Context("with a flaky test", func() {
		It("should normally fail", func() {
			session := startGinkgo(pathToTest, "--focus=flaky")
			Eventually(session).Should(gexec.Exit(1))
		})

		It("should pass if retries are requested", func() {
			session := startGinkgo(pathToTest, "--focus=flaky --flakeAttempts=2")
			Eventually(session).Should(gexec.Exit(0))
		})
	})

	It("should perform a dry run when told to", func() {
		pathToTest = tmpPath("fail")
		copyIn("fail_fixture", pathToTest)
		session := startGinkgo(pathToTest, "--dryRun", "-v")
		Eventually(session).Should(gexec.Exit(0))
		output := string(session.Out.Contents())

		Ω(output).Should(ContainSubstring("synchronous failures"))
		Ω(output).Should(ContainSubstring("16 Specs"))
		Ω(output).Should(ContainSubstring("0 Passed"))
		Ω(output).Should(ContainSubstring("0 Failed"))
	})

	regextest := func(regexOption string, skipOrFocus string) string {
		pathToTest = tmpPath("passing")
		copyIn("passing_ginkgo_tests", pathToTest)
		session := startGinkgo(pathToTest, regexOption, "--dryRun", "-v", skipOrFocus)
		Eventually(session).Should(gexec.Exit(0))
		return string(session.Out.Contents())
	}

	It("regexScansFilePath (enabled) should skip and focus on file names", func() {
		output := regextest("-regexScansFilePath=true", "-skip=/passing/") // everything gets skipped (nothing runs)
		Ω(output).Should(ContainSubstring("0 of 4 Specs"))
		output = regextest("-regexScansFilePath=true", "-focus=/passing/") // everything gets focused (everything runs)
		Ω(output).Should(ContainSubstring("4 of 4 Specs"))
	})

	It("regexScansFilePath (disabled) should not affect normal filtering", func() {
		output := regextest("-regexScansFilePath=false", "-skip=/passing/") // nothing gets skipped (everything runs)
		Ω(output).Should(ContainSubstring("4 of 4 Specs"))
		output = regextest("-regexScansFilePath=false", "-focus=/passing/") // nothing gets focused (nothing runs)
		Ω(output).Should(ContainSubstring("0 of 4 Specs"))
	})

	It("should honor compiler flags", func() {
		session := startGinkgo(pathToTest, "-gcflags=-importmap 'math=math/cmplx'")
		Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE))
		output := string(session.Out.Contents())
		Ω(output).Should(ContainSubstring("NaN returns complex128"))
	})

	It("should honor covermode flag", func() {
		session := startGinkgo(pathToTest, "--noColor", "--covermode=count", "--focus=the focused set")
		Eventually(session).Should(gexec.Exit(0))
		output := string(session.Out.Contents())
		Ω(output).Should(ContainSubstring("coverage: "))

		coverageFile := filepath.Join(pathToTest, "flags.coverprofile")
		_, err := os.Stat(coverageFile)
		Ω(err).ShouldNot(HaveOccurred())
		contents, err := ioutil.ReadFile(coverageFile)
		Ω(err).ShouldNot(HaveOccurred())
		Ω(contents).Should(ContainSubstring("mode: count"))
	})
})
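The `CUSTOM_FLAG` assertions above depend on arguments after `--` being forwarded to the compiled test binary. A fixture can pick such an argument up with the standard `flag` package; a sketch of that shape (only the flag name mirrors the fixture, the rest is invented):

```golang
package flags_test

import (
	"flag"
	"fmt"

	. "github.com/onsi/ginkgo"
)

// Registered at init time so `ginkgo -- --customFlag=madagascar` can set it;
// the testing machinery parses test-binary flags before specs run.
var customFlag = flag.String("customFlag", "default", "passed through via `ginkgo -- --customFlag=...`")

var _ = Describe("custom flags", func() {
	It("sees the value passed after --", func() {
		// Printing it makes the value visible in the run output,
		// e.g. "CUSTOM_FLAG: madagascar".
		fmt.Printf("CUSTOM_FLAG: %s\n", *customFlag)
	})
})
```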
1
vendor/github.com/onsi/ginkgo/integration/integration.go
generated
vendored
Normal file
@ -0,0 +1 @@
package integration
91
vendor/github.com/onsi/ginkgo/integration/integration_suite_test.go
generated
vendored
Normal file
@ -0,0 +1,91 @@
package integration_test

import (
	"io"
	"io/ioutil"
	"os"
	"os/exec"
	"path/filepath"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
	"github.com/onsi/gomega/gexec"

	"testing"
	"time"
)

var tmpDir string
var pathToGinkgo string

func TestIntegration(t *testing.T) {
	SetDefaultEventuallyTimeout(15 * time.Second)
	RegisterFailHandler(Fail)
	RunSpecs(t, "Integration Suite")
}

var _ = SynchronizedBeforeSuite(func() []byte {
	pathToGinkgo, err := gexec.Build("github.com/onsi/ginkgo/ginkgo")
	Ω(err).ShouldNot(HaveOccurred())
	return []byte(pathToGinkgo)
}, func(computedPathToGinkgo []byte) {
	pathToGinkgo = string(computedPathToGinkgo)
})

var _ = BeforeEach(func() {
	var err error
	tmpDir, err = ioutil.TempDir("", "ginkgo-run")
	Ω(err).ShouldNot(HaveOccurred())
})

var _ = AfterEach(func() {
	err := os.RemoveAll(tmpDir)
	Ω(err).ShouldNot(HaveOccurred())
})

var _ = SynchronizedAfterSuite(func() {}, func() {
	gexec.CleanupBuildArtifacts()
})

func tmpPath(destination string) string {
	return filepath.Join(tmpDir, destination)
}

func copyIn(fixture string, destination string) {
	err := os.MkdirAll(destination, 0777)
	Ω(err).ShouldNot(HaveOccurred())

	filepath.Walk(filepath.Join("_fixtures", fixture), func(path string, info os.FileInfo, err error) error {
		if info.IsDir() {
			return nil
		}

		base := filepath.Base(path)

		src, err := os.Open(path)
		Ω(err).ShouldNot(HaveOccurred())
		defer src.Close()

		dst, err := os.Create(filepath.Join(destination, base))
		Ω(err).ShouldNot(HaveOccurred())
		defer dst.Close()

		_, err = io.Copy(dst, src)
		Ω(err).ShouldNot(HaveOccurred())
		return nil
	})
}

func ginkgoCommand(dir string, args ...string) *exec.Cmd {
	cmd := exec.Command(pathToGinkgo, args...)
	cmd.Dir = dir

	return cmd
}

func startGinkgo(dir string, args ...string) *gexec.Session {
	cmd := ginkgoCommand(dir, args...)
	session, err := gexec.Start(cmd, GinkgoWriter, GinkgoWriter)
	Ω(err).ShouldNot(HaveOccurred())
	return session
}
51
vendor/github.com/onsi/ginkgo/integration/interrupt_test.go
generated
vendored
Normal file
@ -0,0 +1,51 @@
package integration_test

import (
	"os/exec"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
	"github.com/onsi/gomega/gbytes"
	"github.com/onsi/gomega/gexec"
)

var _ = Describe("Interrupt", func() {
	var pathToTest string
	BeforeEach(func() {
		pathToTest = tmpPath("hanging")
		copyIn("hanging_suite", pathToTest)
	})

	Context("when interrupting a suite", func() {
		var session *gexec.Session
		BeforeEach(func() {
			//we need to signal the actual process, so we must compile the test first
			var err error
			cmd := exec.Command("go", "test", "-c")
			cmd.Dir = pathToTest
			session, err = gexec.Start(cmd, GinkgoWriter, GinkgoWriter)
			Ω(err).ShouldNot(HaveOccurred())
			Eventually(session).Should(gexec.Exit(0))

			//then run the compiled test directly
			cmd = exec.Command("./hanging.test", "--test.v=true", "--ginkgo.noColor")
			cmd.Dir = pathToTest
			session, err = gexec.Start(cmd, GinkgoWriter, GinkgoWriter)
			Ω(err).ShouldNot(HaveOccurred())

			Eventually(session).Should(gbytes.Say("Sleeping..."))
			session.Interrupt()
			Eventually(session, 1000).Should(gexec.Exit(1))
		})

		It("should emit the contents of the GinkgoWriter", func() {
			Ω(session).Should(gbytes.Say("Just beginning"))
			Ω(session).Should(gbytes.Say("Almost there..."))
			Ω(session).Should(gbytes.Say("Hanging Out"))
		})

		It("should run the AfterSuite", func() {
			Ω(session).Should(gbytes.Say("Heading Out After Suite"))
		})
	})
})
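The fixture being interrupted here boils down to a spec that never returns plus an AfterSuite, which is what makes the SIGINT behavior interesting. A rough sketch of such a fixture, reconstructed from the strings this spec expects rather than copied from the actual fixture file:

```golang
package hanging_test

import (
	"fmt"
	"time"

	. "github.com/onsi/ginkgo"
)

var _ = AfterSuite(func() {
	// Ginkgo still attempts to run this when the process receives ^C / SIGINT.
	fmt.Println("Heading Out After Suite")
})

var _ = Describe("a hanging spec", func() {
	It("sleeps forever", func() {
		fmt.Println("Sleeping...")
		time.Sleep(time.Hour)
	})
})
```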
53
vendor/github.com/onsi/ginkgo/integration/precompiled_test.go
generated
vendored
Normal file
@ -0,0 +1,53 @@
package integration_test

import (
	"os"
	"os/exec"
	"path/filepath"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
	"github.com/onsi/gomega/gbytes"
	"github.com/onsi/gomega/gexec"
)

var _ = Describe("ginkgo build", func() {
	var pathToTest string

	BeforeEach(func() {
		pathToTest = tmpPath("passing_ginkgo_tests")
		copyIn("passing_ginkgo_tests", pathToTest)
		session := startGinkgo(pathToTest, "build")
		Eventually(session).Should(gexec.Exit(0))
		output := string(session.Out.Contents())
		Ω(output).Should(ContainSubstring("Compiling passing_ginkgo_tests"))
		Ω(output).Should(ContainSubstring("compiled passing_ginkgo_tests.test"))
	})

	It("should build a test binary", func() {
		_, err := os.Stat(filepath.Join(pathToTest, "passing_ginkgo_tests.test"))
		Ω(err).ShouldNot(HaveOccurred())
	})

	It("should be possible to run the test binary directly", func() {
		cmd := exec.Command("./passing_ginkgo_tests.test")
		cmd.Dir = pathToTest
		session, err := gexec.Start(cmd, GinkgoWriter, GinkgoWriter)
		Ω(err).ShouldNot(HaveOccurred())
		Eventually(session).Should(gexec.Exit(0))
		Ω(session).Should(gbytes.Say("Running Suite: Passing_ginkgo_tests Suite"))
	})

	It("should be possible to run the test binary via ginkgo", func() {
		session := startGinkgo(pathToTest, "./passing_ginkgo_tests.test")
		Eventually(session).Should(gexec.Exit(0))
		Ω(session).Should(gbytes.Say("Running Suite: Passing_ginkgo_tests Suite"))
	})

	It("should be possible to run the test binary in parallel", func() {
		session := startGinkgo(pathToTest, "--nodes=4", "--noColor", "./passing_ginkgo_tests.test")
		Eventually(session).Should(gexec.Exit(0))
		Ω(session).Should(gbytes.Say("Running Suite: Passing_ginkgo_tests Suite"))
		Ω(session).Should(gbytes.Say("Running in parallel across 4 nodes"))
	})
})
75
vendor/github.com/onsi/ginkgo/integration/progress_test.go
generated
vendored
Normal file
@ -0,0 +1,75 @@
package integration_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
	"github.com/onsi/gomega/gbytes"
	"github.com/onsi/gomega/gexec"
)

var _ = Describe("Emitting progress", func() {
	var pathToTest string
	var session *gexec.Session
	var args []string

	BeforeEach(func() {
		args = []string{"--noColor"}
		pathToTest = tmpPath("progress")
		copyIn("progress_fixture", pathToTest)
	})

	JustBeforeEach(func() {
		session = startGinkgo(pathToTest, args...)
		Eventually(session).Should(gexec.Exit(0))
	})

	Context("with the -progress flag, but no -v flag", func() {
		BeforeEach(func() {
			args = append(args, "-progress")
		})

		It("should not emit progress", func() {
			Ω(session).ShouldNot(gbytes.Say("[bB]efore"))
		})
	})

	Context("with the -v flag", func() {
		BeforeEach(func() {
			args = append(args, "-v")
		})

		It("should not emit progress", func() {
			Ω(session).ShouldNot(gbytes.Say(`\[BeforeEach\]`))
			Ω(session).Should(gbytes.Say(`>outer before<`))
		})
	})

	Context("with the -progress flag and the -v flag", func() {
		BeforeEach(func() {
			args = append(args, "-progress", "-v")
		})

		It("should emit progress (by writing to the GinkgoWriter)", func() {
			Ω(session).Should(gbytes.Say(`\[BeforeEach\] ProgressFixture`))
			Ω(session).Should(gbytes.Say(`>outer before<`))

			Ω(session).Should(gbytes.Say(`\[BeforeEach\] Inner Context`))
			Ω(session).Should(gbytes.Say(`>inner before<`))

			Ω(session).Should(gbytes.Say(`\[JustBeforeEach\] ProgressFixture`))
			Ω(session).Should(gbytes.Say(`>outer just before<`))

			Ω(session).Should(gbytes.Say(`\[JustBeforeEach\] Inner Context`))
			Ω(session).Should(gbytes.Say(`>inner just before<`))

			Ω(session).Should(gbytes.Say(`\[It\] should emit progress as it goes`))
			Ω(session).Should(gbytes.Say(`>it<`))

			Ω(session).Should(gbytes.Say(`\[AfterEach\] Inner Context`))
			Ω(session).Should(gbytes.Say(`>inner after<`))

			Ω(session).Should(gbytes.Say(`\[AfterEach\] ProgressFixture`))
			Ω(session).Should(gbytes.Say(`>outer after<`))
		})
	})
})
399
vendor/github.com/onsi/ginkgo/integration/run_test.go
generated
vendored
Normal file
@ -0,0 +1,399 @@
package integration_test

import (
	"fmt"
	"regexp"
	"runtime"
	"strings"

	. "github.com/onsi/ginkgo"
	"github.com/onsi/ginkgo/types"
	. "github.com/onsi/gomega"
	"github.com/onsi/gomega/gbytes"
	"github.com/onsi/gomega/gexec"
)

var _ = Describe("Running Specs", func() {
	var pathToTest string

	isWindows := (runtime.GOOS == "windows")
	denoter := "•"

	if isWindows {
		denoter = "+"
	}

	Context("when pointed at the current directory", func() {
		BeforeEach(func() {
			pathToTest = tmpPath("ginkgo")
			copyIn("passing_ginkgo_tests", pathToTest)
		})

		It("should run the tests in the working directory", func() {
			session := startGinkgo(pathToTest, "--noColor")
			Eventually(session).Should(gexec.Exit(0))
			output := string(session.Out.Contents())

			Ω(output).Should(ContainSubstring("Running Suite: Passing_ginkgo_tests Suite"))
			Ω(output).Should(ContainSubstring(strings.Repeat(denoter, 4)))
			Ω(output).Should(ContainSubstring("SUCCESS! -- 4 Passed"))
			Ω(output).Should(ContainSubstring("Test Suite Passed"))
		})
	})

	Context("when passed an explicit package to run", func() {
		BeforeEach(func() {
			pathToTest = tmpPath("ginkgo")
			copyIn("passing_ginkgo_tests", pathToTest)
		})

		It("should run the ginkgo style tests", func() {
			session := startGinkgo(tmpDir, "--noColor", pathToTest)
			Eventually(session).Should(gexec.Exit(0))
			output := string(session.Out.Contents())

			Ω(output).Should(ContainSubstring("Running Suite: Passing_ginkgo_tests Suite"))
			Ω(output).Should(ContainSubstring(strings.Repeat(denoter, 4)))
			Ω(output).Should(ContainSubstring("SUCCESS! -- 4 Passed"))
			Ω(output).Should(ContainSubstring("Test Suite Passed"))
		})
	})

	Context("when passed a number of packages to run", func() {
		BeforeEach(func() {
			pathToTest = tmpPath("ginkgo")
			otherPathToTest := tmpPath("other")
			copyIn("passing_ginkgo_tests", pathToTest)
			copyIn("more_ginkgo_tests", otherPathToTest)
		})

		It("should run the ginkgo style tests", func() {
			session := startGinkgo(tmpDir, "--noColor", "--succinct=false", "ginkgo", "./other")
			Eventually(session).Should(gexec.Exit(0))
			output := string(session.Out.Contents())

			Ω(output).Should(ContainSubstring("Running Suite: Passing_ginkgo_tests Suite"))
			Ω(output).Should(ContainSubstring("Running Suite: More_ginkgo_tests Suite"))
			Ω(output).Should(ContainSubstring("Test Suite Passed"))
		})
	})

	Context("when passed a number of packages to run, some of which have focused tests", func() {
		BeforeEach(func() {
			pathToTest = tmpPath("ginkgo")
			otherPathToTest := tmpPath("other")
			focusedPathToTest := tmpPath("focused")
			copyIn("passing_ginkgo_tests", pathToTest)
			copyIn("more_ginkgo_tests", otherPathToTest)
			copyIn("focused_fixture", focusedPathToTest)
		})

		It("should exit with a status code of 2 and explain why", func() {
			session := startGinkgo(tmpDir, "--noColor", "--succinct=false", "-r")
			Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE))
			output := string(session.Out.Contents())

			Ω(output).Should(ContainSubstring("Running Suite: Passing_ginkgo_tests Suite"))
			Ω(output).Should(ContainSubstring("Running Suite: More_ginkgo_tests Suite"))
			Ω(output).Should(ContainSubstring("Test Suite Passed"))
			Ω(output).Should(ContainSubstring("Detected Programmatic Focus - setting exit status to %d", types.GINKGO_FOCUS_EXIT_CODE))
		})
	})

	Context("when told to skipPackages", func() {
		BeforeEach(func() {
			pathToTest = tmpPath("ginkgo")
			otherPathToTest := tmpPath("other")
			focusedPathToTest := tmpPath("focused")
			copyIn("passing_ginkgo_tests", pathToTest)
			copyIn("more_ginkgo_tests", otherPathToTest)
			copyIn("focused_fixture", focusedPathToTest)
		})

		It("should skip packages that match the list", func() {
			session := startGinkgo(tmpDir, "--noColor", "--skipPackage=other,focused", "-r")
			Eventually(session).Should(gexec.Exit(0))
			output := string(session.Out.Contents())

			Ω(output).Should(ContainSubstring("Passing_ginkgo_tests Suite"))
			Ω(output).ShouldNot(ContainSubstring("More_ginkgo_tests Suite"))
			Ω(output).ShouldNot(ContainSubstring("Focused_fixture Suite"))
			Ω(output).Should(ContainSubstring("Test Suite Passed"))
		})

		Context("when all packages are skipped", func() {
			It("should not run anything, but still exit 0", func() {
				session := startGinkgo(tmpDir, "--noColor", "--skipPackage=other,focused,ginkgo", "-r")
				Eventually(session).Should(gexec.Exit(0))
				output := string(session.Out.Contents())

				Ω(output).Should(ContainSubstring("All tests skipped!"))
				Ω(output).ShouldNot(ContainSubstring("Passing_ginkgo_tests Suite"))
				Ω(output).ShouldNot(ContainSubstring("More_ginkgo_tests Suite"))
				Ω(output).ShouldNot(ContainSubstring("Focused_fixture Suite"))
				Ω(output).ShouldNot(ContainSubstring("Test Suite Passed"))
			})
		})
	})

	Context("when there are no tests to run", func() {
		It("should exit 1", func() {
			session := startGinkgo(tmpDir, "--noColor", "--skipPackage=other,focused", "-r")
			Eventually(session).Should(gexec.Exit(1))
			output := string(session.Err.Contents())

			Ω(output).Should(ContainSubstring("Found no test suites"))
		})
	})

	Context("when told to randomizeSuites", func() {
		BeforeEach(func() {
			pathToTest = tmpPath("ginkgo")
			otherPathToTest := tmpPath("other")
			copyIn("passing_ginkgo_tests", pathToTest)
			copyIn("more_ginkgo_tests", otherPathToTest)
		})

		It("should randomize the order in which the suites run, based on the seed", func() {
			session := startGinkgo(tmpDir, "--noColor", "--randomizeSuites", "-r", "--seed=2")
			Eventually(session).Should(gexec.Exit(0))

			Ω(session).Should(gbytes.Say("More_ginkgo_tests Suite"))
			Ω(session).Should(gbytes.Say("Passing_ginkgo_tests Suite"))

			session = startGinkgo(tmpDir, "--noColor", "--randomizeSuites", "-r", "--seed=3")
			Eventually(session).Should(gexec.Exit(0))

			Ω(session).Should(gbytes.Say("Passing_ginkgo_tests Suite"))
			Ω(session).Should(gbytes.Say("More_ginkgo_tests Suite"))
		})
	})

	Context("when pointed at a package with xunit style tests", func() {
		BeforeEach(func() {
			pathToTest = tmpPath("xunit")
			copyIn("xunit_tests", pathToTest)
		})

		It("should run the xunit style tests", func() {
			session := startGinkgo(pathToTest)
			Eventually(session).Should(gexec.Exit(0))
			output := string(session.Out.Contents())

			Ω(output).Should(ContainSubstring("--- PASS: TestAlwaysTrue"))
			Ω(output).Should(ContainSubstring("Test Suite Passed"))
		})
	})

	Context("when pointed at a package with no tests", func() {
		BeforeEach(func() {
			pathToTest = tmpPath("no_tests")
			copyIn("no_tests", pathToTest)
		})

		It("should fail", func() {
			session := startGinkgo(pathToTest, "--noColor")
			Eventually(session).Should(gexec.Exit(1))

			Ω(session.Err.Contents()).Should(ContainSubstring("Found no test suites"))
		})
	})

	Context("when pointed at a package that fails to compile", func() {
		BeforeEach(func() {
			pathToTest = tmpPath("does_not_compile")
			copyIn("does_not_compile", pathToTest)
		})

		It("should fail", func() {
			session := startGinkgo(pathToTest, "--noColor")
			Eventually(session).Should(gexec.Exit(1))
			output := string(session.Out.Contents())

			Ω(output).Should(ContainSubstring("Failed to compile"))
		})
	})

	Context("when running in parallel", func() {
		BeforeEach(func() {
			pathToTest = tmpPath("ginkgo")
			copyIn("passing_ginkgo_tests", pathToTest)
		})

		Context("with a specific number of -nodes", func() {
			It("should use the specified number of nodes", func() {
				session := startGinkgo(pathToTest, "--noColor", "-succinct", "-nodes=2")
				Eventually(session).Should(gexec.Exit(0))
				output := string(session.Out.Contents())

				Ω(output).Should(MatchRegexp(`\[\d+\] Passing_ginkgo_tests Suite - 4/4 specs - 2 nodes [%s]{4} SUCCESS! \d+(\.\d+)?[muµ]s`, regexp.QuoteMeta(denoter)))
				Ω(output).Should(ContainSubstring("Test Suite Passed"))
			})
		})

		Context("with -p", func() {
			It("it should autocompute the number of nodes", func() {
				session := startGinkgo(pathToTest, "--noColor", "-succinct", "-p")
				Eventually(session).Should(gexec.Exit(0))
				output := string(session.Out.Contents())

				nodes := runtime.NumCPU()
				if nodes == 1 {
					Skip("Can't test parallel testing with 1 CPU")
				}
				if nodes > 4 {
					nodes = nodes - 1
				}
				Ω(output).Should(MatchRegexp(`\[\d+\] Passing_ginkgo_tests Suite - 4/4 specs - %d nodes [%s]{4} SUCCESS! \d+(\.\d+)?[muµ]s`, nodes, regexp.QuoteMeta(denoter)))
				Ω(output).Should(ContainSubstring("Test Suite Passed"))
			})
		})
	})

	Context("when streaming in parallel", func() {
		BeforeEach(func() {
			pathToTest = tmpPath("ginkgo")
			copyIn("passing_ginkgo_tests", pathToTest)
		})

		It("should print output in realtime", func() {
			session := startGinkgo(pathToTest, "--noColor", "-stream", "-nodes=2")
			Eventually(session).Should(gexec.Exit(0))
			output := string(session.Out.Contents())

			Ω(output).Should(ContainSubstring(`[1] Parallel test node 1/2.`))
			Ω(output).Should(ContainSubstring(`[2] Parallel test node 2/2.`))
			Ω(output).Should(ContainSubstring(`[1] SUCCESS!`))
			Ω(output).Should(ContainSubstring(`[2] SUCCESS!`))
			Ω(output).Should(ContainSubstring("Test Suite Passed"))
		})
	})

	Context("when running recursively", func() {
		BeforeEach(func() {
			passingTest := tmpPath("A")
			otherPassingTest := tmpPath("E")
			copyIn("passing_ginkgo_tests", passingTest)
			copyIn("more_ginkgo_tests", otherPassingTest)
		})

		Context("when all the tests pass", func() {
			Context("with the -r flag", func() {
				It("should run all the tests (in succinct mode) and succeed", func() {
					session := startGinkgo(tmpDir, "--noColor", "-r", ".")
					Eventually(session).Should(gexec.Exit(0))
					output := string(session.Out.Contents())

					outputLines := strings.Split(output, "\n")
					Ω(outputLines[0]).Should(MatchRegexp(`\[\d+\] Passing_ginkgo_tests Suite - 4/4 specs [%s]{4} SUCCESS! \d+(\.\d+)?[muµ]s PASS`, regexp.QuoteMeta(denoter)))
					Ω(outputLines[1]).Should(MatchRegexp(`\[\d+\] More_ginkgo_tests Suite - 2/2 specs [%s]{2} SUCCESS! \d+(\.\d+)?[muµ]s PASS`, regexp.QuoteMeta(denoter)))
					Ω(output).Should(ContainSubstring("Test Suite Passed"))
				})
			})

			Context("with a trailing /...", func() {
				It("should run all the tests (in succinct mode) and succeed", func() {
					session := startGinkgo(tmpDir, "--noColor", "./...")
					Eventually(session).Should(gexec.Exit(0))
					output := string(session.Out.Contents())

					outputLines := strings.Split(output, "\n")
					Ω(outputLines[0]).Should(MatchRegexp(`\[\d+\] Passing_ginkgo_tests Suite - 4/4 specs [%s]{4} SUCCESS! \d+(\.\d+)?[muµ]s PASS`, regexp.QuoteMeta(denoter)))
					Ω(outputLines[1]).Should(MatchRegexp(`\[\d+\] More_ginkgo_tests Suite - 2/2 specs [%s]{2} SUCCESS! \d+(\.\d+)?[muµ]s PASS`, regexp.QuoteMeta(denoter)))
					Ω(output).Should(ContainSubstring("Test Suite Passed"))
				})
			})
		})

		Context("when one of the packages has a failing test", func() {
			BeforeEach(func() {
				failingTest := tmpPath("C")
				copyIn("failing_ginkgo_tests", failingTest)
			})

			It("should fail and stop running tests", func() {
				session := startGinkgo(tmpDir, "--noColor", "-r")
				Eventually(session).Should(gexec.Exit(1))
				output := string(session.Out.Contents())

				outputLines := strings.Split(output, "\n")
				Ω(outputLines[0]).Should(MatchRegexp(`\[\d+\] Passing_ginkgo_tests Suite - 4/4 specs [%s]{4} SUCCESS! \d+(\.\d+)?[muµ]s PASS`, regexp.QuoteMeta(denoter)))
				Ω(outputLines[1]).Should(MatchRegexp(`\[\d+\] Failing_ginkgo_tests Suite - 2/2 specs`))
				Ω(output).Should(ContainSubstring(fmt.Sprintf("%s Failure", denoter)))
				Ω(output).ShouldNot(ContainSubstring("More_ginkgo_tests Suite"))
				Ω(output).Should(ContainSubstring("Test Suite Failed"))

				Ω(output).Should(ContainSubstring("Summarizing 1 Failure:"))
				Ω(output).Should(ContainSubstring("[Fail] FailingGinkgoTests [It] should fail"))
			})
		})

		Context("when one of the packages fails to compile", func() {
			BeforeEach(func() {
				doesNotCompileTest := tmpPath("C")
				copyIn("does_not_compile", doesNotCompileTest)
			})

			It("should fail and stop running tests", func() {
				session := startGinkgo(tmpDir, "--noColor", "-r")
				Eventually(session).Should(gexec.Exit(1))
				output := string(session.Out.Contents())

				outputLines := strings.Split(output, "\n")
				Ω(outputLines[0]).Should(MatchRegexp(`\[\d+\] Passing_ginkgo_tests Suite - 4/4 specs [%s]{4} SUCCESS! \d+(\.\d+)?[muµ]s PASS`, regexp.QuoteMeta(denoter)))
				Ω(outputLines[1]).Should(ContainSubstring("Failed to compile C:"))
				Ω(output).ShouldNot(ContainSubstring("More_ginkgo_tests Suite"))
				Ω(output).Should(ContainSubstring("Test Suite Failed"))
			})
		})

		Context("when either is the case, but the keepGoing flag is set", func() {
			BeforeEach(func() {
				doesNotCompileTest := tmpPath("B")
				copyIn("does_not_compile", doesNotCompileTest)

				failingTest := tmpPath("C")
				copyIn("failing_ginkgo_tests", failingTest)
			})

			It("should soldier on", func() {
				session := startGinkgo(tmpDir, "--noColor", "-r", "-keepGoing")
				Eventually(session).Should(gexec.Exit(1))
				output := string(session.Out.Contents())

				outputLines := strings.Split(output, "\n")
				Ω(outputLines[0]).Should(MatchRegexp(`\[\d+\] Passing_ginkgo_tests Suite - 4/4 specs [%s]{4} SUCCESS! \d+(\.\d+)?[muµ]s PASS`, regexp.QuoteMeta(denoter)))
				Ω(outputLines[1]).Should(ContainSubstring("Failed to compile B:"))
				Ω(output).Should(MatchRegexp(`\[\d+\] Failing_ginkgo_tests Suite - 2/2 specs`))
				Ω(output).Should(ContainSubstring(fmt.Sprintf("%s Failure", denoter)))
				Ω(output).Should(MatchRegexp(`\[\d+\] More_ginkgo_tests Suite - 2/2 specs [%s]{2} SUCCESS! \d+(\.\d+)?[muµ]s PASS`, regexp.QuoteMeta(denoter)))
				Ω(output).Should(ContainSubstring("Test Suite Failed"))
			})
		})
	})

	Context("when told to keep going --untilItFails", func() {
		BeforeEach(func() {
			copyIn("eventually_failing", tmpDir)
		})

		It("should keep rerunning the tests, until a failure occurs", func() {
			session := startGinkgo(tmpDir, "--untilItFails", "--noColor")
			Eventually(session).Should(gexec.Exit(1))
			Ω(session).Should(gbytes.Say("This was attempt #1"))
			Ω(session).Should(gbytes.Say("This was attempt #2"))
			Ω(session).Should(gbytes.Say("Tests failed on attempt #3"))

			//it should change the random seed between each test
			lines := strings.Split(string(session.Out.Contents()), "\n")
			randomSeeds := []string{}
			for _, line := range lines {
				if strings.Contains(line, "Random Seed:") {
					randomSeeds = append(randomSeeds, strings.Split(line, ": ")[1])
				}
			}
			Ω(randomSeeds[0]).ShouldNot(Equal(randomSeeds[1]))
			Ω(randomSeeds[1]).ShouldNot(Equal(randomSeeds[2]))
			Ω(randomSeeds[0]).ShouldNot(Equal(randomSeeds[2]))
		})
	})
})
43
vendor/github.com/onsi/ginkgo/integration/skip_test.go
generated
vendored
Normal file
43
vendor/github.com/onsi/ginkgo/integration/skip_test.go
generated
vendored
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
package integration_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
. "github.com/onsi/ginkgo"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
"github.com/onsi/gomega/gexec"
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ = Describe("Skipping Specs", func() {
|
||||||
|
var pathToTest string
|
||||||
|
|
||||||
|
BeforeEach(func() {
|
||||||
|
pathToTest = tmpPath("skipping")
|
||||||
|
copyIn("skip_fixture", pathToTest)
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should skip in all the possible ways", func() {
|
||||||
|
session := startGinkgo(pathToTest, "--noColor")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := string(session.Out.Contents())
|
||||||
|
|
||||||
|
Ω(output).ShouldNot(ContainSubstring("NEVER SEE THIS"))
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("a top level skip on line 9"))
|
||||||
|
Ω(output).Should(ContainSubstring("skip_fixture_test.go:9"))
|
||||||
|
Ω(output).Should(ContainSubstring("an async top level skip on line 14"))
|
||||||
|
Ω(output).Should(ContainSubstring("skip_fixture_test.go:14"))
|
||||||
|
Ω(output).Should(ContainSubstring("a top level goroutine skip on line 21"))
|
||||||
|
Ω(output).Should(ContainSubstring("skip_fixture_test.go:21"))
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("a sync SKIP"))
|
||||||
|
Ω(output).Should(ContainSubstring("an async SKIP"))
|
||||||
|
Ω(output).Should(ContainSubstring("a goroutine SKIP"))
|
||||||
|
Ω(output).Should(ContainSubstring("a measure SKIP"))
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("S [SKIPPING] in Spec Setup (BeforeEach) ["))
|
||||||
|
Ω(output).Should(ContainSubstring("a BeforeEach SKIP"))
|
||||||
|
Ω(output).Should(ContainSubstring("S [SKIPPING] in Spec Teardown (AfterEach) ["))
|
||||||
|
Ω(output).Should(ContainSubstring("an AfterEach SKIP"))
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("0 Passed | 0 Failed | 0 Pending | 9 Skipped"))
|
||||||
|
})
|
||||||
|
})
|
395
vendor/github.com/onsi/ginkgo/integration/subcommand_test.go
generated
vendored
Normal file
395
vendor/github.com/onsi/ginkgo/integration/subcommand_test.go
generated
vendored
Normal file
@ -0,0 +1,395 @@
|
|||||||
|
package integration_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
. "github.com/onsi/ginkgo"
|
||||||
|
"github.com/onsi/ginkgo/types"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
"github.com/onsi/gomega/gexec"
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ = Describe("Subcommand", func() {
|
||||||
|
Describe("ginkgo bootstrap", func() {
|
||||||
|
var pkgPath string
|
||||||
|
BeforeEach(func() {
|
||||||
|
pkgPath = tmpPath("foo")
|
||||||
|
os.Mkdir(pkgPath, 0777)
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should generate a bootstrap file, as long as one does not exist", func() {
|
||||||
|
session := startGinkgo(pkgPath, "bootstrap")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := session.Out.Contents()
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("foo_suite_test.go"))
|
||||||
|
|
||||||
|
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "foo_suite_test.go"))
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
Ω(content).Should(ContainSubstring("package foo_test"))
|
||||||
|
Ω(content).Should(ContainSubstring("func TestFoo(t *testing.T) {"))
|
||||||
|
Ω(content).Should(ContainSubstring("RegisterFailHandler"))
|
||||||
|
Ω(content).Should(ContainSubstring("RunSpecs"))
|
||||||
|
|
||||||
|
Ω(content).Should(ContainSubstring("\t" + `. "github.com/onsi/ginkgo"`))
|
||||||
|
Ω(content).Should(ContainSubstring("\t" + `. "github.com/onsi/gomega"`))
|
||||||
|
|
||||||
|
session = startGinkgo(pkgPath, "bootstrap")
|
||||||
|
Eventually(session).Should(gexec.Exit(1))
|
||||||
|
output = session.Out.Contents()
|
||||||
|
Ω(output).Should(ContainSubstring("foo_suite_test.go already exists"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should import nodot declarations when told to", func() {
|
||||||
|
session := startGinkgo(pkgPath, "bootstrap", "--nodot")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := session.Out.Contents()
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("foo_suite_test.go"))
|
||||||
|
|
||||||
|
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "foo_suite_test.go"))
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
Ω(content).Should(ContainSubstring("package foo_test"))
|
||||||
|
Ω(content).Should(ContainSubstring("func TestFoo(t *testing.T) {"))
|
||||||
|
Ω(content).Should(ContainSubstring("RegisterFailHandler"))
|
||||||
|
Ω(content).Should(ContainSubstring("RunSpecs"))
|
||||||
|
|
||||||
|
Ω(content).Should(ContainSubstring("var It = ginkgo.It"))
|
||||||
|
Ω(content).Should(ContainSubstring("var Ω = gomega.Ω"))
|
||||||
|
|
||||||
|
Ω(content).Should(ContainSubstring("\t" + `"github.com/onsi/ginkgo"`))
|
||||||
|
Ω(content).Should(ContainSubstring("\t" + `"github.com/onsi/gomega"`))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should generate an agouti bootstrap file when told to", func() {
|
||||||
|
session := startGinkgo(pkgPath, "bootstrap", "--agouti")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := session.Out.Contents()
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("foo_suite_test.go"))
|
||||||
|
|
||||||
|
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "foo_suite_test.go"))
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
Ω(content).Should(ContainSubstring("package foo_test"))
|
||||||
|
Ω(content).Should(ContainSubstring("func TestFoo(t *testing.T) {"))
|
||||||
|
Ω(content).Should(ContainSubstring("RegisterFailHandler"))
|
||||||
|
Ω(content).Should(ContainSubstring("RunSpecs"))
|
||||||
|
|
||||||
|
Ω(content).Should(ContainSubstring("\t" + `. "github.com/onsi/ginkgo"`))
|
||||||
|
Ω(content).Should(ContainSubstring("\t" + `. "github.com/onsi/gomega"`))
|
||||||
|
Ω(content).Should(ContainSubstring("\t" + `"github.com/sclevine/agouti"`))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should generate a bootstrap file using a template when told to", func() {
|
||||||
|
templateFile := filepath.Join(pkgPath, ".bootstrap")
|
||||||
|
ioutil.WriteFile(templateFile, []byte(`package {{.Package}}
|
||||||
|
|
||||||
|
import (
|
||||||
|
{{.GinkgoImport}}
|
||||||
|
{{.GomegaImport}}
|
||||||
|
|
||||||
|
"testing"
|
||||||
|
"binary"
|
||||||
|
)
|
||||||
|
|
||||||
|
func Test{{.FormattedName}}(t *testing.T) {
|
||||||
|
// This is a {{.Package}} test
|
||||||
|
}`), 0666)
|
||||||
|
session := startGinkgo(pkgPath, "bootstrap", "--template", templateFile)
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := session.Out.Contents()
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("foo_suite_test.go"))
|
||||||
|
|
||||||
|
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "foo_suite_test.go"))
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
Ω(content).Should(ContainSubstring("package foo_test"))
|
||||||
|
Ω(content).Should(ContainSubstring(`. "github.com/onsi/ginkgo"`))
|
||||||
|
Ω(content).Should(ContainSubstring(`. "github.com/onsi/gomega"`))
|
||||||
|
Ω(content).Should(ContainSubstring(`"binary"`))
|
||||||
|
Ω(content).Should(ContainSubstring("// This is a foo_test test"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("nodot", func() {
|
||||||
|
It("should update the declarations in the bootstrap file", func() {
|
||||||
|
pkgPath := tmpPath("foo")
|
||||||
|
os.Mkdir(pkgPath, 0777)
|
||||||
|
|
||||||
|
session := startGinkgo(pkgPath, "bootstrap", "--nodot")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
|
||||||
|
byteContent, err := ioutil.ReadFile(filepath.Join(pkgPath, "foo_suite_test.go"))
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
|
||||||
|
content := string(byteContent)
|
||||||
|
content = strings.Replace(content, "var It =", "var MyIt =", -1)
|
||||||
|
content = strings.Replace(content, "var Ω = gomega.Ω\n", "", -1)
|
||||||
|
|
||||||
|
err = ioutil.WriteFile(filepath.Join(pkgPath, "foo_suite_test.go"), []byte(content), os.ModePerm)
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
|
||||||
|
session = startGinkgo(pkgPath, "nodot")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
|
||||||
|
byteContent, err = ioutil.ReadFile(filepath.Join(pkgPath, "foo_suite_test.go"))
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
|
||||||
|
Ω(byteContent).Should(ContainSubstring("var MyIt = ginkgo.It"))
|
||||||
|
Ω(byteContent).ShouldNot(ContainSubstring("var It = ginkgo.It"))
|
||||||
|
Ω(byteContent).Should(ContainSubstring("var Ω = gomega.Ω"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("ginkgo generate", func() {
|
||||||
|
var pkgPath string
|
||||||
|
|
||||||
|
BeforeEach(func() {
|
||||||
|
pkgPath = tmpPath("foo_bar")
|
||||||
|
os.Mkdir(pkgPath, 0777)
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("with no arguments", func() {
|
||||||
|
It("should generate a test file named after the package", func() {
|
||||||
|
session := startGinkgo(pkgPath, "generate")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := session.Out.Contents()
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("foo_bar_test.go"))
|
||||||
|
|
||||||
|
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "foo_bar_test.go"))
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
Ω(content).Should(ContainSubstring("package foo_bar_test"))
|
||||||
|
Ω(content).Should(ContainSubstring(`var _ = Describe("FooBar", func() {`))
|
||||||
|
Ω(content).Should(ContainSubstring("\t" + `. "github.com/onsi/ginkgo"`))
|
||||||
|
Ω(content).Should(ContainSubstring("\t" + `. "github.com/onsi/gomega"`))
|
||||||
|
|
||||||
|
session = startGinkgo(pkgPath, "generate")
|
||||||
|
Eventually(session).Should(gexec.Exit(1))
|
||||||
|
output = session.Out.Contents()
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("foo_bar_test.go already exists"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("with an argument of the form: foo", func() {
|
||||||
|
It("should generate a test file named after the argument", func() {
|
||||||
|
session := startGinkgo(pkgPath, "generate", "baz_buzz")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := session.Out.Contents()
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("baz_buzz_test.go"))
|
||||||
|
|
||||||
|
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "baz_buzz_test.go"))
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
Ω(content).Should(ContainSubstring("package foo_bar_test"))
|
||||||
|
Ω(content).Should(ContainSubstring(`var _ = Describe("BazBuzz", func() {`))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("with an argument of the form: foo.go", func() {
|
||||||
|
It("should generate a test file named after the argument", func() {
|
||||||
|
session := startGinkgo(pkgPath, "generate", "baz_buzz.go")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := session.Out.Contents()
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("baz_buzz_test.go"))
|
||||||
|
|
||||||
|
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "baz_buzz_test.go"))
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
Ω(content).Should(ContainSubstring("package foo_bar_test"))
|
||||||
|
Ω(content).Should(ContainSubstring(`var _ = Describe("BazBuzz", func() {`))
|
||||||
|
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("with an argument of the form: foo_test", func() {
|
||||||
|
It("should generate a test file named after the argument", func() {
|
||||||
|
session := startGinkgo(pkgPath, "generate", "baz_buzz_test")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := session.Out.Contents()
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("baz_buzz_test.go"))
|
||||||
|
|
||||||
|
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "baz_buzz_test.go"))
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
Ω(content).Should(ContainSubstring("package foo_bar_test"))
|
||||||
|
Ω(content).Should(ContainSubstring(`var _ = Describe("BazBuzz", func() {`))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("with an argument of the form: foo_test.go", func() {
|
||||||
|
It("should generate a test file named after the argument", func() {
|
||||||
|
session := startGinkgo(pkgPath, "generate", "baz_buzz_test.go")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := session.Out.Contents()
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("baz_buzz_test.go"))
|
||||||
|
|
||||||
|
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "baz_buzz_test.go"))
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
Ω(content).Should(ContainSubstring("package foo_bar_test"))
|
||||||
|
Ω(content).Should(ContainSubstring(`var _ = Describe("BazBuzz", func() {`))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("with multiple arguments", func() {
|
||||||
|
It("should generate a test file named after the argument", func() {
|
||||||
|
session := startGinkgo(pkgPath, "generate", "baz", "buzz")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := session.Out.Contents()
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("baz_test.go"))
|
||||||
|
Ω(output).Should(ContainSubstring("buzz_test.go"))
|
||||||
|
|
||||||
|
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "baz_test.go"))
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
Ω(content).Should(ContainSubstring("package foo_bar_test"))
|
||||||
|
Ω(content).Should(ContainSubstring(`var _ = Describe("Baz", func() {`))
|
||||||
|
|
||||||
|
content, err = ioutil.ReadFile(filepath.Join(pkgPath, "buzz_test.go"))
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
Ω(content).Should(ContainSubstring("package foo_bar_test"))
|
||||||
|
Ω(content).Should(ContainSubstring(`var _ = Describe("Buzz", func() {`))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("with nodot", func() {
|
||||||
|
It("should not import ginkgo or gomega", func() {
|
||||||
|
session := startGinkgo(pkgPath, "generate", "--nodot")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := session.Out.Contents()
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("foo_bar_test.go"))
|
||||||
|
|
||||||
|
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "foo_bar_test.go"))
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
Ω(content).Should(ContainSubstring("package foo_bar_test"))
|
||||||
|
Ω(content).ShouldNot(ContainSubstring("\t" + `. "github.com/onsi/ginkgo"`))
|
||||||
|
Ω(content).ShouldNot(ContainSubstring("\t" + `. "github.com/onsi/gomega"`))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("with agouti", func() {
|
||||||
|
It("should generate an agouti test file", func() {
|
||||||
|
session := startGinkgo(pkgPath, "generate", "--agouti")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := session.Out.Contents()
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("foo_bar_test.go"))
|
||||||
|
|
||||||
|
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "foo_bar_test.go"))
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
Ω(content).Should(ContainSubstring("package foo_bar_test"))
|
||||||
|
Ω(content).Should(ContainSubstring("\t" + `. "github.com/onsi/ginkgo"`))
|
||||||
|
Ω(content).Should(ContainSubstring("\t" + `. "github.com/onsi/gomega"`))
|
||||||
|
Ω(content).Should(ContainSubstring("\t" + `. "github.com/sclevine/agouti/matchers"`))
|
||||||
|
Ω(content).Should(ContainSubstring("\t" + `"github.com/sclevine/agouti"`))
|
||||||
|
Ω(content).Should(ContainSubstring("page, err = agoutiDriver.NewPage()"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("ginkgo bootstrap/generate", func() {
|
||||||
|
var pkgPath string
|
||||||
|
BeforeEach(func() {
|
||||||
|
pkgPath = tmpPath("some crazy-thing")
|
||||||
|
os.Mkdir(pkgPath, 0777)
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when the working directory is empty", func() {
|
||||||
|
It("generates correctly named bootstrap and generate files with a package name derived from the directory", func() {
|
||||||
|
session := startGinkgo(pkgPath, "bootstrap")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
|
||||||
|
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "some_crazy_thing_suite_test.go"))
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
Ω(content).Should(ContainSubstring("package some_crazy_thing_test"))
|
||||||
|
Ω(content).Should(ContainSubstring("SomeCrazyThing Suite"))
|
||||||
|
|
||||||
|
session = startGinkgo(pkgPath, "generate")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
|
||||||
|
content, err = ioutil.ReadFile(filepath.Join(pkgPath, "some_crazy_thing_test.go"))
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
Ω(content).Should(ContainSubstring("package some_crazy_thing_test"))
|
||||||
|
Ω(content).Should(ContainSubstring("SomeCrazyThing"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when the working directory contains a file with a package name", func() {
|
||||||
|
BeforeEach(func() {
|
||||||
|
Ω(ioutil.WriteFile(filepath.Join(pkgPath, "foo.go"), []byte("package main\n\nfunc main() {}"), 0777)).Should(Succeed())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("generates correctly named bootstrap and generate files with the package name", func() {
|
||||||
|
session := startGinkgo(pkgPath, "bootstrap")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
|
||||||
|
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "some_crazy_thing_suite_test.go"))
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
Ω(content).Should(ContainSubstring("package main_test"))
|
||||||
|
Ω(content).Should(ContainSubstring("SomeCrazyThing Suite"))
|
||||||
|
|
||||||
|
session = startGinkgo(pkgPath, "generate")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
|
||||||
|
content, err = ioutil.ReadFile(filepath.Join(pkgPath, "some_crazy_thing_test.go"))
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
Ω(content).Should(ContainSubstring("package main_test"))
|
||||||
|
Ω(content).Should(ContainSubstring("SomeCrazyThing"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("ginkgo blur", func() {
|
||||||
|
It("should unfocus tests", func() {
|
||||||
|
pathToTest := tmpPath("focused")
|
||||||
|
copyIn("focused_fixture", pathToTest)
|
||||||
|
|
||||||
|
session := startGinkgo(pathToTest, "--noColor")
|
||||||
|
Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE))
|
||||||
|
output := session.Out.Contents()
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("6 Passed"))
|
||||||
|
Ω(output).Should(ContainSubstring("5 Skipped"))
|
||||||
|
|
||||||
|
session = startGinkgo(pathToTest, "blur")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
|
||||||
|
session = startGinkgo(pathToTest, "--noColor")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output = session.Out.Contents()
|
||||||
|
Ω(output).Should(ContainSubstring("11 Passed"))
|
||||||
|
Ω(output).Should(ContainSubstring("0 Skipped"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("ginkgo version", func() {
|
||||||
|
It("should print out the version info", func() {
|
||||||
|
session := startGinkgo("", "version")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := session.Out.Contents()
|
||||||
|
|
||||||
|
Ω(output).Should(MatchRegexp(`Ginkgo Version \d+\.\d+\.\d+`))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("ginkgo help", func() {
|
||||||
|
It("should print out usage information", func() {
|
||||||
|
session := startGinkgo("", "help")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := string(session.Err.Contents())
|
||||||
|
|
||||||
|
Ω(output).Should(MatchRegexp(`Ginkgo Version \d+\.\d+\.\d+`))
|
||||||
|
Ω(output).Should(ContainSubstring("ginkgo watch"))
|
||||||
|
Ω(output).Should(ContainSubstring("-succinct"))
|
||||||
|
Ω(output).Should(ContainSubstring("-nodes"))
|
||||||
|
Ω(output).Should(ContainSubstring("ginkgo generate"))
|
||||||
|
Ω(output).Should(ContainSubstring("ginkgo help <COMMAND>"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
63
vendor/github.com/onsi/ginkgo/integration/suite_command_test.go
generated
vendored
Normal file
63
vendor/github.com/onsi/ginkgo/integration/suite_command_test.go
generated
vendored
Normal file
@ -0,0 +1,63 @@
|
|||||||
|
package integration_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
. "github.com/onsi/ginkgo"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
"github.com/onsi/gomega/gexec"
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ = Describe("Suite Command Specs", func() {
|
||||||
|
var pathToTest string
|
||||||
|
|
||||||
|
BeforeEach(func() {
|
||||||
|
pathToTest = tmpPath("suite_command")
|
||||||
|
copyIn("suite_command_tests", pathToTest)
|
||||||
|
})
|
||||||
|
|
||||||
|
It("Runs command after suite echoing out suite data, properly reporting suite name and passing status in successful command output", func() {
|
||||||
|
command := "-afterSuiteHook=echo THIS IS A (ginkgo-suite-passed) TEST OF THE (ginkgo-suite-name) SYSTEM, THIS IS ONLY A TEST"
|
||||||
|
expected := "THIS IS A [PASS] TEST OF THE suite_command SYSTEM, THIS IS ONLY A TEST"
|
||||||
|
session := startGinkgo(pathToTest, command)
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := string(session.Out.Contents())
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("1 Passed"))
|
||||||
|
Ω(output).Should(ContainSubstring("0 Failed"))
|
||||||
|
Ω(output).Should(ContainSubstring("1 Pending"))
|
||||||
|
Ω(output).Should(ContainSubstring("0 Skipped"))
|
||||||
|
Ω(output).Should(ContainSubstring("Test Suite Passed"))
|
||||||
|
Ω(output).Should(ContainSubstring("Post-suite command succeeded:"))
|
||||||
|
Ω(output).Should(ContainSubstring(expected))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("Runs command after suite reporting that command failed", func() {
|
||||||
|
command := "-afterSuiteHook=exit 1"
|
||||||
|
session := startGinkgo(pathToTest, command)
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := string(session.Out.Contents())
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("1 Passed"))
|
||||||
|
Ω(output).Should(ContainSubstring("0 Failed"))
|
||||||
|
Ω(output).Should(ContainSubstring("1 Pending"))
|
||||||
|
Ω(output).Should(ContainSubstring("0 Skipped"))
|
||||||
|
Ω(output).Should(ContainSubstring("Test Suite Passed"))
|
||||||
|
Ω(output).Should(ContainSubstring("Post-suite command failed:"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("Runs command after suite echoing out suite data, properly reporting suite name and failing status in successful command output", func() {
|
||||||
|
command := "-afterSuiteHook=echo THIS IS A (ginkgo-suite-passed) TEST OF THE (ginkgo-suite-name) SYSTEM, THIS IS ONLY A TEST"
|
||||||
|
expected := "THIS IS A [FAIL] TEST OF THE suite_command SYSTEM, THIS IS ONLY A TEST"
|
||||||
|
session := startGinkgo(pathToTest, "-failOnPending=true", command)
|
||||||
|
Eventually(session).Should(gexec.Exit(1))
|
||||||
|
output := string(session.Out.Contents())
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("1 Passed"))
|
||||||
|
Ω(output).Should(ContainSubstring("0 Failed"))
|
||||||
|
Ω(output).Should(ContainSubstring("1 Pending"))
|
||||||
|
Ω(output).Should(ContainSubstring("0 Skipped"))
|
||||||
|
Ω(output).Should(ContainSubstring("Test Suite Failed"))
|
||||||
|
Ω(output).Should(ContainSubstring("Post-suite command succeeded:"))
|
||||||
|
Ω(output).Should(ContainSubstring(expected))
|
||||||
|
})
|
||||||
|
|
||||||
|
})
|
178
vendor/github.com/onsi/ginkgo/integration/suite_setup_test.go
generated
vendored
Normal file
178
vendor/github.com/onsi/ginkgo/integration/suite_setup_test.go
generated
vendored
Normal file
@ -0,0 +1,178 @@
|
|||||||
|
package integration_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
. "github.com/onsi/ginkgo"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
"github.com/onsi/gomega/gexec"
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ = Describe("SuiteSetup", func() {
|
||||||
|
var pathToTest string
|
||||||
|
|
||||||
|
Context("when the BeforeSuite and AfterSuite pass", func() {
|
||||||
|
BeforeEach(func() {
|
||||||
|
pathToTest = tmpPath("suite_setup")
|
||||||
|
copyIn("passing_suite_setup", pathToTest)
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should run the BeforeSuite once, then run all the tests", func() {
|
||||||
|
session := startGinkgo(pathToTest, "--noColor")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := string(session.Out.Contents())
|
||||||
|
|
||||||
|
Ω(strings.Count(output, "BEFORE SUITE")).Should(Equal(1))
|
||||||
|
Ω(strings.Count(output, "AFTER SUITE")).Should(Equal(1))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should run the BeforeSuite once per parallel node, then run all the tests", func() {
|
||||||
|
session := startGinkgo(pathToTest, "--noColor", "--nodes=2")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := string(session.Out.Contents())
|
||||||
|
|
||||||
|
Ω(strings.Count(output, "BEFORE SUITE")).Should(Equal(2))
|
||||||
|
Ω(strings.Count(output, "AFTER SUITE")).Should(Equal(2))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when the BeforeSuite fails", func() {
|
||||||
|
BeforeEach(func() {
|
||||||
|
pathToTest = tmpPath("suite_setup")
|
||||||
|
copyIn("failing_before_suite", pathToTest)
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should run the BeforeSuite once, none of the tests, but it should run the AfterSuite", func() {
|
||||||
|
session := startGinkgo(pathToTest, "--noColor")
|
||||||
|
Eventually(session).Should(gexec.Exit(1))
|
||||||
|
output := string(session.Out.Contents())
|
||||||
|
|
||||||
|
Ω(strings.Count(output, "BEFORE SUITE")).Should(Equal(1))
|
||||||
|
Ω(strings.Count(output, "Test Panicked")).Should(Equal(1))
|
||||||
|
Ω(strings.Count(output, "AFTER SUITE")).Should(Equal(1))
|
||||||
|
Ω(output).ShouldNot(ContainSubstring("NEVER SEE THIS"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should run the BeforeSuite once per parallel node, none of the tests, but it should run the AfterSuite for each node", func() {
|
||||||
|
session := startGinkgo(pathToTest, "--noColor", "--nodes=2")
|
||||||
|
Eventually(session).Should(gexec.Exit(1))
|
||||||
|
output := string(session.Out.Contents())
|
||||||
|
|
||||||
|
Ω(strings.Count(output, "BEFORE SUITE")).Should(Equal(2))
|
||||||
|
Ω(strings.Count(output, "Test Panicked")).Should(Equal(2))
|
||||||
|
Ω(strings.Count(output, "AFTER SUITE")).Should(Equal(2))
|
||||||
|
Ω(output).ShouldNot(ContainSubstring("NEVER SEE THIS"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when the AfterSuite fails", func() {
|
||||||
|
BeforeEach(func() {
|
||||||
|
pathToTest = tmpPath("suite_setup")
|
||||||
|
copyIn("failing_after_suite", pathToTest)
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should run the BeforeSuite once, none of the tests, but it should run the AfterSuite", func() {
|
||||||
|
session := startGinkgo(pathToTest, "--noColor")
|
||||||
|
Eventually(session).Should(gexec.Exit(1))
|
||||||
|
output := string(session.Out.Contents())
|
||||||
|
|
||||||
|
Ω(strings.Count(output, "BEFORE SUITE")).Should(Equal(1))
|
||||||
|
Ω(strings.Count(output, "AFTER SUITE")).Should(Equal(1))
|
||||||
|
Ω(strings.Count(output, "Test Panicked")).Should(Equal(1))
|
||||||
|
Ω(strings.Count(output, "A TEST")).Should(Equal(2))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should run the BeforeSuite once per parallel node, none of the tests, but it should run the AfterSuite for each node", func() {
|
||||||
|
session := startGinkgo(pathToTest, "--noColor", "--nodes=2")
|
||||||
|
Eventually(session).Should(gexec.Exit(1))
|
||||||
|
output := string(session.Out.Contents())
|
||||||
|
|
||||||
|
Ω(strings.Count(output, "BEFORE SUITE")).Should(Equal(2))
|
||||||
|
Ω(strings.Count(output, "AFTER SUITE")).Should(Equal(2))
|
||||||
|
Ω(strings.Count(output, "Test Panicked")).Should(Equal(2))
|
||||||
|
Ω(strings.Count(output, "A TEST")).Should(Equal(2))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("With passing synchronized before and after suites", func() {
|
||||||
|
BeforeEach(func() {
|
||||||
|
pathToTest = tmpPath("suite_setup")
|
||||||
|
copyIn("synchronized_setup_tests", pathToTest)
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when run with one node", func() {
|
||||||
|
It("should do all the work on that one node", func() {
|
||||||
|
session := startGinkgo(pathToTest, "--noColor")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := string(session.Out.Contents())
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("BEFORE_A_1\nBEFORE_B_1: DATA"))
|
||||||
|
Ω(output).Should(ContainSubstring("AFTER_A_1\nAFTER_B_1"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when run across multiple nodes", func() {
|
||||||
|
It("should run the first BeforeSuite function (BEFORE_A) on node 1, the second (BEFORE_B) on all the nodes, the first AfterSuite (AFTER_A) on all the nodes, and then the second (AFTER_B) on Node 1 *after* everything else is finished", func() {
|
||||||
|
session := startGinkgo(pathToTest, "--noColor", "--nodes=3")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := string(session.Out.Contents())
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("BEFORE_A_1"))
|
||||||
|
Ω(output).Should(ContainSubstring("BEFORE_B_1: DATA"))
|
||||||
|
Ω(output).Should(ContainSubstring("BEFORE_B_2: DATA"))
|
||||||
|
Ω(output).Should(ContainSubstring("BEFORE_B_3: DATA"))
|
||||||
|
|
||||||
|
Ω(output).ShouldNot(ContainSubstring("BEFORE_A_2"))
|
||||||
|
Ω(output).ShouldNot(ContainSubstring("BEFORE_A_3"))
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("AFTER_A_1"))
|
||||||
|
Ω(output).Should(ContainSubstring("AFTER_A_2"))
|
||||||
|
Ω(output).Should(ContainSubstring("AFTER_A_3"))
|
||||||
|
Ω(output).Should(ContainSubstring("AFTER_B_1"))
|
||||||
|
|
||||||
|
Ω(output).ShouldNot(ContainSubstring("AFTER_B_2"))
|
||||||
|
Ω(output).ShouldNot(ContainSubstring("AFTER_B_3"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when streaming across multiple nodes", func() {
|
||||||
|
It("should run the first BeforeSuite function (BEFORE_A) on node 1, the second (BEFORE_B) on all the nodes, the first AfterSuite (AFTER_A) on all the nodes, and then the second (AFTER_B) on Node 1 *after* everything else is finished", func() {
|
||||||
|
session := startGinkgo(pathToTest, "--noColor", "--nodes=3", "--stream")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := string(session.Out.Contents())
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("[1] BEFORE_A_1"))
|
||||||
|
Ω(output).Should(ContainSubstring("[1] BEFORE_B_1: DATA"))
|
||||||
|
Ω(output).Should(ContainSubstring("[2] BEFORE_B_2: DATA"))
|
||||||
|
Ω(output).Should(ContainSubstring("[3] BEFORE_B_3: DATA"))
|
||||||
|
|
||||||
|
Ω(output).ShouldNot(ContainSubstring("BEFORE_A_2"))
|
||||||
|
Ω(output).ShouldNot(ContainSubstring("BEFORE_A_3"))
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("[1] AFTER_A_1"))
|
||||||
|
Ω(output).Should(ContainSubstring("[2] AFTER_A_2"))
|
||||||
|
Ω(output).Should(ContainSubstring("[3] AFTER_A_3"))
|
||||||
|
Ω(output).Should(ContainSubstring("[1] AFTER_B_1"))
|
||||||
|
|
||||||
|
Ω(output).ShouldNot(ContainSubstring("AFTER_B_2"))
|
||||||
|
Ω(output).ShouldNot(ContainSubstring("AFTER_B_3"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("With a failing synchronized before suite", func() {
|
||||||
|
BeforeEach(func() {
|
||||||
|
pathToTest = tmpPath("suite_setup")
|
||||||
|
copyIn("exiting_synchronized_setup_tests", pathToTest)
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should fail and let the user know that node 1 disappeared prematurely", func() {
|
||||||
|
session := startGinkgo(pathToTest, "--noColor", "--nodes=3")
|
||||||
|
Eventually(session).Should(gexec.Exit(1))
|
||||||
|
output := string(session.Out.Contents())
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("Node 1 disappeared before completing BeforeSuite"))
|
||||||
|
Ω(output).Should(ContainSubstring("Ginkgo timed out waiting for all parallel nodes to report back!"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
27
vendor/github.com/onsi/ginkgo/integration/tags_test.go
generated
vendored
Normal file
27
vendor/github.com/onsi/ginkgo/integration/tags_test.go
generated
vendored
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
package integration_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
. "github.com/onsi/ginkgo"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
"github.com/onsi/gomega/gexec"
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ = Describe("Tags", func() {
|
||||||
|
var pathToTest string
|
||||||
|
BeforeEach(func() {
|
||||||
|
pathToTest = tmpPath("tags")
|
||||||
|
copyIn("tags_tests", pathToTest)
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should honor the passed in -tags flag", func() {
|
||||||
|
session := startGinkgo(pathToTest, "--noColor")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := string(session.Out.Contents())
|
||||||
|
Ω(output).Should(ContainSubstring("Ran 1 of 1 Specs"))
|
||||||
|
|
||||||
|
session = startGinkgo(pathToTest, "--noColor", "-tags=complex_tests")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output = string(session.Out.Contents())
|
||||||
|
Ω(output).Should(ContainSubstring("Ran 3 of 3 Specs"))
|
||||||
|
})
|
||||||
|
})
|
25
vendor/github.com/onsi/ginkgo/integration/test_description_test.go
generated
vendored
Normal file
25
vendor/github.com/onsi/ginkgo/integration/test_description_test.go
generated
vendored
Normal file
@ -0,0 +1,25 @@
|
|||||||
|
package integration_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
. "github.com/onsi/ginkgo"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
"github.com/onsi/gomega/gbytes"
|
||||||
|
"github.com/onsi/gomega/gexec"
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ = Describe("TestDescription", func() {
|
||||||
|
var pathToTest string
|
||||||
|
|
||||||
|
BeforeEach(func() {
|
||||||
|
pathToTest = tmpPath("test_description")
|
||||||
|
copyIn("test_description", pathToTest)
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should capture and emit information about the current test", func() {
|
||||||
|
session := startGinkgo(pathToTest, "--noColor")
|
||||||
|
Eventually(session).Should(gexec.Exit(1))
|
||||||
|
|
||||||
|
Ω(session).Should(gbytes.Say("TestDescription should pass:false"))
|
||||||
|
Ω(session).Should(gbytes.Say("TestDescription should fail:true"))
|
||||||
|
})
|
||||||
|
})
|
90
vendor/github.com/onsi/ginkgo/integration/verbose_and_succinct_test.go
generated
vendored
Normal file
90
vendor/github.com/onsi/ginkgo/integration/verbose_and_succinct_test.go
generated
vendored
Normal file
@ -0,0 +1,90 @@
|
|||||||
|
package integration_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"regexp"
|
||||||
|
"runtime"
|
||||||
|
|
||||||
|
. "github.com/onsi/ginkgo"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
"github.com/onsi/gomega/gexec"
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ = Describe("Verbose And Succinct Mode", func() {
|
||||||
|
var pathToTest string
|
||||||
|
var otherPathToTest string
|
||||||
|
|
||||||
|
isWindows := (runtime.GOOS == "windows")
|
||||||
|
denoter := "•"
|
||||||
|
|
||||||
|
if isWindows {
|
||||||
|
denoter = "+"
|
||||||
|
}
|
||||||
|
|
||||||
|
Context("when running one package", func() {
|
||||||
|
BeforeEach(func() {
|
||||||
|
pathToTest = tmpPath("ginkgo")
|
||||||
|
copyIn("passing_ginkgo_tests", pathToTest)
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should default to non-succinct mode", func() {
|
||||||
|
session := startGinkgo(pathToTest, "--noColor")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := session.Out.Contents()
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("Running Suite: Passing_ginkgo_tests Suite"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when running more than one package", func() {
|
||||||
|
BeforeEach(func() {
|
||||||
|
pathToTest = tmpPath("ginkgo")
|
||||||
|
copyIn("passing_ginkgo_tests", pathToTest)
|
||||||
|
otherPathToTest = tmpPath("more_ginkgo")
|
||||||
|
copyIn("more_ginkgo_tests", otherPathToTest)
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("with no flags set", func() {
|
||||||
|
It("should default to succinct mode", func() {
|
||||||
|
session := startGinkgo(pathToTest, "--noColor", pathToTest, otherPathToTest)
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := session.Out.Contents()
|
||||||
|
|
||||||
|
Ω(output).Should(MatchRegexp(`\] Passing_ginkgo_tests Suite - 4/4 specs [%s]{4} SUCCESS!`, regexp.QuoteMeta(denoter)))
|
||||||
|
Ω(output).Should(MatchRegexp(`\] More_ginkgo_tests Suite - 2/2 specs [%s]{2} SUCCESS!`, regexp.QuoteMeta(denoter)))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("with --succinct=false", func() {
|
||||||
|
It("should not be in succinct mode", func() {
|
||||||
|
session := startGinkgo(pathToTest, "--noColor", "--succinct=false", pathToTest, otherPathToTest)
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := session.Out.Contents()
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("Running Suite: Passing_ginkgo_tests Suite"))
|
||||||
|
Ω(output).Should(ContainSubstring("Running Suite: More_ginkgo_tests Suite"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("with -v", func() {
|
||||||
|
It("should not be in succinct mode, but should be verbose", func() {
|
||||||
|
session := startGinkgo(pathToTest, "--noColor", "-v", pathToTest, otherPathToTest)
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := session.Out.Contents()
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("Running Suite: Passing_ginkgo_tests Suite"))
|
||||||
|
Ω(output).Should(ContainSubstring("Running Suite: More_ginkgo_tests Suite"))
|
||||||
|
Ω(output).Should(ContainSubstring("should proxy strings"))
|
||||||
|
Ω(output).Should(ContainSubstring("should always pass"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should emit output from Bys", func() {
|
||||||
|
session := startGinkgo(pathToTest, "--noColor", "-v", pathToTest)
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
output := session.Out.Contents()
|
||||||
|
|
||||||
|
Ω(output).Should(ContainSubstring("emitting one By"))
|
||||||
|
Ω(output).Should(ContainSubstring("emitting another By"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
239
vendor/github.com/onsi/ginkgo/integration/watch_test.go
generated
vendored
Normal file
239
vendor/github.com/onsi/ginkgo/integration/watch_test.go
generated
vendored
Normal file
@ -0,0 +1,239 @@
|
|||||||
|
package integration_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
. "github.com/onsi/ginkgo"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
"github.com/onsi/gomega/gbytes"
|
||||||
|
"github.com/onsi/gomega/gexec"
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ = Describe("Watch", func() {
|
||||||
|
var rootPath string
|
||||||
|
var pathA string
|
||||||
|
var pathB string
|
||||||
|
var pathC string
|
||||||
|
var session *gexec.Session
|
||||||
|
|
||||||
|
BeforeEach(func() {
|
||||||
|
rootPath = tmpPath("root")
|
||||||
|
pathA = filepath.Join(rootPath, "src", "github.com", "onsi", "A")
|
||||||
|
pathB = filepath.Join(rootPath, "src", "github.com", "onsi", "B")
|
||||||
|
pathC = filepath.Join(rootPath, "src", "github.com", "onsi", "C")
|
||||||
|
|
||||||
|
err := os.MkdirAll(pathA, 0700)
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
|
||||||
|
err = os.MkdirAll(pathB, 0700)
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
|
||||||
|
err = os.MkdirAll(pathC, 0700)
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
|
||||||
|
copyIn(filepath.Join("watch_fixtures", "A"), pathA)
|
||||||
|
copyIn(filepath.Join("watch_fixtures", "B"), pathB)
|
||||||
|
copyIn(filepath.Join("watch_fixtures", "C"), pathC)
|
||||||
|
})
|
||||||
|
|
||||||
|
startGinkgoWithGopath := func(args ...string) *gexec.Session {
|
||||||
|
cmd := ginkgoCommand(rootPath, args...)
|
||||||
|
cmd.Env = append([]string{"GOPATH=" + rootPath + ":" + os.Getenv("GOPATH")}, os.Environ()...)
|
||||||
|
session, err := gexec.Start(cmd, GinkgoWriter, GinkgoWriter)
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
return session
|
||||||
|
}
|
||||||
|
|
||||||
|
modifyFile := func(path string) {
|
||||||
|
time.Sleep(time.Second)
|
||||||
|
content, err := ioutil.ReadFile(path)
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
content = append(content, []byte("//")...)
|
||||||
|
err = ioutil.WriteFile(path, content, 0666)
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
}
|
||||||
|
|
||||||
|
modifyCode := func(pkgToModify string) {
|
||||||
|
modifyFile(filepath.Join(rootPath, "src", "github.com", "onsi", pkgToModify, pkgToModify+".go"))
|
||||||
|
}
|
||||||
|
|
||||||
|
modifyTest := func(pkgToModify string) {
|
||||||
|
modifyFile(filepath.Join(rootPath, "src", "github.com", "onsi", pkgToModify, pkgToModify+"_test.go"))
|
||||||
|
}
|
||||||
|
|
||||||
|
AfterEach(func() {
|
||||||
|
if session != nil {
|
||||||
|
session.Kill().Wait()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should be set up correctly", func() {
|
||||||
|
session = startGinkgoWithGopath("-r")
|
||||||
|
Eventually(session).Should(gexec.Exit(0))
|
||||||
|
Ω(session.Out.Contents()).Should(ContainSubstring("A Suite"))
|
||||||
|
Ω(session.Out.Contents()).Should(ContainSubstring("B Suite"))
|
||||||
|
Ω(session.Out.Contents()).Should(ContainSubstring("C Suite"))
|
||||||
|
Ω(session.Out.Contents()).Should(ContainSubstring("Ginkgo ran 3 suites"))
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when watching just one test suite", func() {
|
||||||
|
It("should immediately run, and should rerun when the test suite changes", func() {
|
||||||
|
session = startGinkgoWithGopath("watch", "-succinct", pathA)
|
||||||
|
Eventually(session).Should(gbytes.Say("A Suite"))
|
||||||
|
modifyCode("A")
|
||||||
|
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||||
|
Eventually(session).Should(gbytes.Say("A Suite"))
|
||||||
|
session.Kill().Wait()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when watching several test suites", func() {
|
||||||
|
It("should not immediately run, but should rerun a test when its code changes", func() {
|
||||||
|
session = startGinkgoWithGopath("watch", "-succinct", "-r")
|
||||||
|
Eventually(session).Should(gbytes.Say("Identified 3 test suites"))
|
||||||
|
Consistently(session).ShouldNot(gbytes.Say("A Suite|B Suite|C Suite"))
|
||||||
|
modifyCode("A")
|
||||||
|
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||||
|
Eventually(session).Should(gbytes.Say("A Suite"))
|
||||||
|
Consistently(session).ShouldNot(gbytes.Say("B Suite|C Suite"))
|
||||||
|
session.Kill().Wait()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("watching dependencies", func() {
|
||||||
|
Context("with a depth of 2", func() {
|
||||||
|
It("should watch down to that depth", func() {
|
||||||
|
session = startGinkgoWithGopath("watch", "-succinct", "-r", "-depth=2")
|
||||||
|
Eventually(session).Should(gbytes.Say("Identified 3 test suites"))
|
||||||
|
Eventually(session).Should(gbytes.Say(`A \[2 dependencies\]`))
|
||||||
|
Eventually(session).Should(gbytes.Say(`B \[1 dependency\]`))
|
||||||
|
Eventually(session).Should(gbytes.Say(`C \[0 dependencies\]`))
|
||||||
|
|
||||||
|
modifyCode("A")
|
||||||
|
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||||
|
Eventually(session).Should(gbytes.Say("A Suite"))
|
||||||
|
Consistently(session).ShouldNot(gbytes.Say("B Suite|C Suite"))
|
||||||
|
|
||||||
|
modifyCode("B")
|
||||||
|
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||||
|
Eventually(session).Should(gbytes.Say("B Suite"))
|
||||||
|
Eventually(session).Should(gbytes.Say("A Suite"))
|
||||||
|
Consistently(session).ShouldNot(gbytes.Say("C Suite"))
|
||||||
|
|
||||||
|
modifyCode("C")
|
||||||
|
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||||
|
Eventually(session).Should(gbytes.Say("C Suite"))
|
||||||
|
Eventually(session).Should(gbytes.Say("B Suite"))
|
||||||
|
Eventually(session).Should(gbytes.Say("A Suite"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("with a depth of 1", func() {
|
||||||
|
It("should watch down to that depth", func() {
|
||||||
|
session = startGinkgoWithGopath("watch", "-succinct", "-r", "-depth=1")
|
||||||
|
Eventually(session).Should(gbytes.Say("Identified 3 test suites"))
|
||||||
|
Eventually(session).Should(gbytes.Say(`A \[1 dependency\]`))
|
||||||
|
Eventually(session).Should(gbytes.Say(`B \[1 dependency\]`))
|
||||||
|
Eventually(session).Should(gbytes.Say(`C \[0 dependencies\]`))
|
||||||
|
|
||||||
|
modifyCode("A")
|
||||||
|
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||||
|
Eventually(session).Should(gbytes.Say("A Suite"))
|
||||||
|
Consistently(session).ShouldNot(gbytes.Say("B Suite|C Suite"))
|
||||||
|
|
||||||
|
modifyCode("B")
|
||||||
|
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||||
|
Eventually(session).Should(gbytes.Say("B Suite"))
|
||||||
|
Eventually(session).Should(gbytes.Say("A Suite"))
|
||||||
|
Consistently(session).ShouldNot(gbytes.Say("C Suite"))
|
||||||
|
|
||||||
|
modifyCode("C")
|
||||||
|
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||||
|
Eventually(session).Should(gbytes.Say("C Suite"))
|
||||||
|
Eventually(session).Should(gbytes.Say("B Suite"))
|
||||||
|
Consistently(session).ShouldNot(gbytes.Say("A Suite"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("with a depth of 0", func() {
|
||||||
|
It("should not watch any dependencies", func() {
|
||||||
|
session = startGinkgoWithGopath("watch", "-succinct", "-r", "-depth=0")
|
||||||
|
Eventually(session).Should(gbytes.Say("Identified 3 test suites"))
|
||||||
|
Eventually(session).Should(gbytes.Say(`A \[0 dependencies\]`))
|
||||||
|
Eventually(session).Should(gbytes.Say(`B \[0 dependencies\]`))
|
||||||
|
Eventually(session).Should(gbytes.Say(`C \[0 dependencies\]`))
|
||||||
|
|
||||||
|
modifyCode("A")
|
||||||
|
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||||
|
Eventually(session).Should(gbytes.Say("A Suite"))
|
||||||
|
Consistently(session).ShouldNot(gbytes.Say("B Suite|C Suite"))
|
||||||
|
|
||||||
|
modifyCode("B")
|
||||||
|
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||||
|
Eventually(session).Should(gbytes.Say("B Suite"))
|
||||||
|
Consistently(session).ShouldNot(gbytes.Say("A Suite|C Suite"))
|
||||||
|
|
||||||
|
modifyCode("C")
|
||||||
|
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||||
|
Eventually(session).Should(gbytes.Say("C Suite"))
|
||||||
|
Consistently(session).ShouldNot(gbytes.Say("A Suite|B Suite"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should not trigger dependents when tests are changed", func() {
|
||||||
|
session = startGinkgoWithGopath("watch", "-succinct", "-r", "-depth=2")
|
||||||
|
Eventually(session).Should(gbytes.Say("Identified 3 test suites"))
|
||||||
|
Eventually(session).Should(gbytes.Say(`A \[2 dependencies\]`))
|
||||||
|
Eventually(session).Should(gbytes.Say(`B \[1 dependency\]`))
|
||||||
|
Eventually(session).Should(gbytes.Say(`C \[0 dependencies\]`))
|
||||||
|
|
||||||
|
modifyTest("A")
|
||||||
|
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||||
|
Eventually(session).Should(gbytes.Say("A Suite"))
|
||||||
|
Consistently(session).ShouldNot(gbytes.Say("B Suite|C Suite"))
|
||||||
|
|
||||||
|
modifyTest("B")
|
||||||
|
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||||
|
Eventually(session).Should(gbytes.Say("B Suite"))
|
||||||
|
Consistently(session).ShouldNot(gbytes.Say("A Suite|C Suite"))
|
||||||
|
|
||||||
|
modifyTest("C")
|
||||||
|
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||||
|
Eventually(session).Should(gbytes.Say("C Suite"))
|
||||||
|
Consistently(session).ShouldNot(gbytes.Say("A Suite|B Suite"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("when new test suite is added", func() {
|
||||||
|
It("should start monitoring that test suite", func() {
|
||||||
|
session = startGinkgoWithGopath("watch", "-succinct", "-r")
|
||||||
|
|
||||||
|
Eventually(session).Should(gbytes.Say("Watching 3 suites"))
|
||||||
|
|
||||||
|
pathD := filepath.Join(rootPath, "src", "github.com", "onsi", "D")
|
||||||
|
|
||||||
|
err := os.MkdirAll(pathD, 0700)
|
||||||
|
Ω(err).ShouldNot(HaveOccurred())
|
||||||
|
|
||||||
|
copyIn(filepath.Join("watch_fixtures", "D"), pathD)
|
||||||
|
|
||||||
|
Eventually(session).Should(gbytes.Say("Detected 1 new suite"))
|
||||||
|
Eventually(session).Should(gbytes.Say(`D \[1 dependency\]`))
|
||||||
|
Eventually(session).Should(gbytes.Say("D Suite"))
|
||||||
|
|
||||||
|
modifyCode("D")
|
||||||
|
|
||||||
|
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||||
|
Eventually(session).Should(gbytes.Say("D Suite"))
|
||||||
|
|
||||||
|
modifyCode("C")
|
||||||
|
|
||||||
|
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||||
|
Eventually(session).Should(gbytes.Say("C Suite"))
|
||||||
|
Eventually(session).Should(gbytes.Say("D Suite"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
32
vendor/github.com/onsi/ginkgo/internal/codelocation/code_location.go
generated
vendored
Normal file
32
vendor/github.com/onsi/ginkgo/internal/codelocation/code_location.go
generated
vendored
Normal file
@ -0,0 +1,32 @@
|
|||||||
|
package codelocation
|
||||||
|
|
||||||
|
import (
|
||||||
|
"regexp"
|
||||||
|
"runtime"
|
||||||
|
"runtime/debug"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/onsi/ginkgo/types"
|
||||||
|
)
|
||||||
|
|
||||||
|
func New(skip int) types.CodeLocation {
|
||||||
|
_, file, line, _ := runtime.Caller(skip + 1)
|
||||||
|
stackTrace := PruneStack(string(debug.Stack()), skip)
|
||||||
|
return types.CodeLocation{FileName: file, LineNumber: line, FullStackTrace: stackTrace}
|
||||||
|
}
|
||||||
|
|
||||||
|
func PruneStack(fullStackTrace string, skip int) string {
|
||||||
|
stack := strings.Split(fullStackTrace, "\n")
|
||||||
|
if len(stack) > 2*(skip+1) {
|
||||||
|
stack = stack[2*(skip+1):]
|
||||||
|
}
|
||||||
|
prunedStack := []string{}
|
||||||
|
re := regexp.MustCompile(`\/ginkgo\/|\/pkg\/testing\/|\/pkg\/runtime\/`)
|
||||||
|
for i := 0; i < len(stack)/2; i++ {
|
||||||
|
if !re.Match([]byte(stack[i*2])) {
|
||||||
|
prunedStack = append(prunedStack, stack[i*2])
|
||||||
|
prunedStack = append(prunedStack, stack[i*2+1])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return strings.Join(prunedStack, "\n")
|
||||||
|
}
|
13
vendor/github.com/onsi/ginkgo/internal/codelocation/code_location_suite_test.go
generated
vendored
Normal file
13
vendor/github.com/onsi/ginkgo/internal/codelocation/code_location_suite_test.go
generated
vendored
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
package codelocation_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
. "github.com/onsi/ginkgo"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
|
||||||
|
"testing"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestCodelocation(t *testing.T) {
|
||||||
|
RegisterFailHandler(Fail)
|
||||||
|
RunSpecs(t, "CodeLocation Suite")
|
||||||
|
}
|
79
vendor/github.com/onsi/ginkgo/internal/codelocation/code_location_test.go
generated
vendored
Normal file
79
vendor/github.com/onsi/ginkgo/internal/codelocation/code_location_test.go
generated
vendored
Normal file
@ -0,0 +1,79 @@
package codelocation_test

import (
	. "github.com/onsi/ginkgo"
	"github.com/onsi/ginkgo/internal/codelocation"
	"github.com/onsi/ginkgo/types"
	. "github.com/onsi/gomega"
	"runtime"
)

var _ = Describe("CodeLocation", func() {
	var (
		codeLocation       types.CodeLocation
		expectedFileName   string
		expectedLineNumber int
	)

	caller0 := func() {
		codeLocation = codelocation.New(1)
	}

	caller1 := func() {
		_, expectedFileName, expectedLineNumber, _ = runtime.Caller(0)
		expectedLineNumber += 2
		caller0()
	}

	BeforeEach(func() {
		caller1()
	})

	It("should use the passed in skip parameter to pick out the correct file & line number", func() {
		Ω(codeLocation.FileName).Should(Equal(expectedFileName))
		Ω(codeLocation.LineNumber).Should(Equal(expectedLineNumber))
	})

	Describe("stringer behavior", func() {
		It("should stringify nicely", func() {
			Ω(codeLocation.String()).Should(ContainSubstring("code_location_test.go:%d", expectedLineNumber))
		})
	})

	//There's no better way than to test this private method as it
	//goes out of its way to prune out ginkgo related code in the stack trace
	Describe("PruneStack", func() {
		It("should remove any references to ginkgo and pkg/testing and pkg/runtime", func() {
			input := `/Skip/me
Skip: skip()
/Skip/me
Skip: skip()
/Users/whoever/gospace/src/github.com/onsi/ginkgo/whatever.go:10 (0x12314)
Something: Func()
/Users/whoever/gospace/src/github.com/onsi/ginkgo/whatever_else.go:10 (0x12314)
SomethingInternalToGinkgo: Func()
/usr/goroot/pkg/strings/oops.go:10 (0x12341)
Oops: BlowUp()
/Users/whoever/gospace/src/mycode/code.go:10 (0x12341)
MyCode: Func()
/Users/whoever/gospace/src/mycode/code_test.go:10 (0x12341)
MyCodeTest: Func()
/Users/whoever/gospace/src/mycode/code_suite_test.go:12 (0x37f08)
TestFoo: RunSpecs(t, "Foo Suite")
/usr/goroot/pkg/testing/testing.go:12 (0x37f08)
TestingT: Blah()
/usr/goroot/pkg/runtime/runtime.go:12 (0x37f08)
Something: Func()
`
			prunedStack := codelocation.PruneStack(input, 1)
			Ω(prunedStack).Should(Equal(`/usr/goroot/pkg/strings/oops.go:10 (0x12341)
Oops: BlowUp()
/Users/whoever/gospace/src/mycode/code.go:10 (0x12341)
MyCode: Func()
/Users/whoever/gospace/src/mycode/code_test.go:10 (0x12341)
MyCodeTest: Func()
/Users/whoever/gospace/src/mycode/code_suite_test.go:12 (0x37f08)
TestFoo: RunSpecs(t, "Foo Suite")`))
		})
	})
})
151
vendor/github.com/onsi/ginkgo/internal/containernode/container_node.go
generated
vendored
Normal file
151
vendor/github.com/onsi/ginkgo/internal/containernode/container_node.go
generated
vendored
Normal file
@ -0,0 +1,151 @@
package containernode

import (
	"math/rand"
	"sort"

	"github.com/onsi/ginkgo/internal/leafnodes"
	"github.com/onsi/ginkgo/types"
)

type subjectOrContainerNode struct {
	containerNode *ContainerNode
	subjectNode   leafnodes.SubjectNode
}

func (n subjectOrContainerNode) text() string {
	if n.containerNode != nil {
		return n.containerNode.Text()
	} else {
		return n.subjectNode.Text()
	}
}

type CollatedNodes struct {
	Containers []*ContainerNode
	Subject    leafnodes.SubjectNode
}

type ContainerNode struct {
	text         string
	flag         types.FlagType
	codeLocation types.CodeLocation

	setupNodes               []leafnodes.BasicNode
	subjectAndContainerNodes []subjectOrContainerNode
}

func New(text string, flag types.FlagType, codeLocation types.CodeLocation) *ContainerNode {
	return &ContainerNode{
		text:         text,
		flag:         flag,
		codeLocation: codeLocation,
	}
}

func (container *ContainerNode) Shuffle(r *rand.Rand) {
	sort.Sort(container)
	permutation := r.Perm(len(container.subjectAndContainerNodes))
	shuffledNodes := make([]subjectOrContainerNode, len(container.subjectAndContainerNodes))
	for i, j := range permutation {
		shuffledNodes[i] = container.subjectAndContainerNodes[j]
	}
	container.subjectAndContainerNodes = shuffledNodes
}

func (node *ContainerNode) BackPropagateProgrammaticFocus() bool {
	if node.flag == types.FlagTypePending {
		return false
	}

	shouldUnfocus := false
	for _, subjectOrContainerNode := range node.subjectAndContainerNodes {
		if subjectOrContainerNode.containerNode != nil {
			shouldUnfocus = subjectOrContainerNode.containerNode.BackPropagateProgrammaticFocus() || shouldUnfocus
		} else {
			shouldUnfocus = (subjectOrContainerNode.subjectNode.Flag() == types.FlagTypeFocused) || shouldUnfocus
		}
	}

	if shouldUnfocus {
		if node.flag == types.FlagTypeFocused {
			node.flag = types.FlagTypeNone
		}
		return true
	}

	return node.flag == types.FlagTypeFocused
}

func (node *ContainerNode) Collate() []CollatedNodes {
	return node.collate([]*ContainerNode{})
}

func (node *ContainerNode) collate(enclosingContainers []*ContainerNode) []CollatedNodes {
	collated := make([]CollatedNodes, 0)

	containers := make([]*ContainerNode, len(enclosingContainers))
	copy(containers, enclosingContainers)
	containers = append(containers, node)

	for _, subjectOrContainer := range node.subjectAndContainerNodes {
		if subjectOrContainer.containerNode != nil {
			collated = append(collated, subjectOrContainer.containerNode.collate(containers)...)
		} else {
			collated = append(collated, CollatedNodes{
				Containers: containers,
				Subject:    subjectOrContainer.subjectNode,
			})
		}
	}

	return collated
}

func (node *ContainerNode) PushContainerNode(container *ContainerNode) {
	node.subjectAndContainerNodes = append(node.subjectAndContainerNodes, subjectOrContainerNode{containerNode: container})
}

func (node *ContainerNode) PushSubjectNode(subject leafnodes.SubjectNode) {
	node.subjectAndContainerNodes = append(node.subjectAndContainerNodes, subjectOrContainerNode{subjectNode: subject})
}

func (node *ContainerNode) PushSetupNode(setupNode leafnodes.BasicNode) {
	node.setupNodes = append(node.setupNodes, setupNode)
}

func (node *ContainerNode) SetupNodesOfType(nodeType types.SpecComponentType) []leafnodes.BasicNode {
	nodes := []leafnodes.BasicNode{}
	for _, setupNode := range node.setupNodes {
		if setupNode.Type() == nodeType {
			nodes = append(nodes, setupNode)
		}
	}
	return nodes
}

func (node *ContainerNode) Text() string {
	return node.text
}

func (node *ContainerNode) CodeLocation() types.CodeLocation {
	return node.codeLocation
}

func (node *ContainerNode) Flag() types.FlagType {
	return node.flag
}

//sort.Interface

func (node *ContainerNode) Len() int {
	return len(node.subjectAndContainerNodes)
}

func (node *ContainerNode) Less(i, j int) bool {
	return node.subjectAndContainerNodes[i].text() < node.subjectAndContainerNodes[j].text()
}

func (node *ContainerNode) Swap(i, j int) {
	node.subjectAndContainerNodes[i], node.subjectAndContainerNodes[j] = node.subjectAndContainerNodes[j], node.subjectAndContainerNodes[i]
}
13
vendor/github.com/onsi/ginkgo/internal/containernode/container_node_suite_test.go
generated
vendored
Normal file
13
vendor/github.com/onsi/ginkgo/internal/containernode/container_node_suite_test.go
generated
vendored
Normal file
@ -0,0 +1,13 @@
package containernode_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"

	"testing"
)

func TestContainernode(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Containernode Suite")
}
212
vendor/github.com/onsi/ginkgo/internal/containernode/container_node_test.go
generated
vendored
Normal file
212
vendor/github.com/onsi/ginkgo/internal/containernode/container_node_test.go
generated
vendored
Normal file
@ -0,0 +1,212 @@
package containernode_test

import (
	"github.com/onsi/ginkgo/internal/leafnodes"
	"math/rand"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"

	"github.com/onsi/ginkgo/internal/codelocation"
	. "github.com/onsi/ginkgo/internal/containernode"
	"github.com/onsi/ginkgo/types"
)

var _ = Describe("Container Node", func() {
	var (
		codeLocation types.CodeLocation
		container    *ContainerNode
	)

	BeforeEach(func() {
		codeLocation = codelocation.New(0)
		container = New("description text", types.FlagTypeFocused, codeLocation)
	})

	Describe("creating a container node", func() {
		It("can answer questions about itself", func() {
			Ω(container.Text()).Should(Equal("description text"))
			Ω(container.Flag()).Should(Equal(types.FlagTypeFocused))
			Ω(container.CodeLocation()).Should(Equal(codeLocation))
		})
	})

	Describe("pushing setup nodes", func() {
		It("can append setup nodes of various types and fetch them by type", func() {
			befA := leafnodes.NewBeforeEachNode(func() {}, codelocation.New(0), 0, nil, 0)
			befB := leafnodes.NewBeforeEachNode(func() {}, codelocation.New(0), 0, nil, 0)
			aftA := leafnodes.NewAfterEachNode(func() {}, codelocation.New(0), 0, nil, 0)
			aftB := leafnodes.NewAfterEachNode(func() {}, codelocation.New(0), 0, nil, 0)
			jusBefA := leafnodes.NewJustBeforeEachNode(func() {}, codelocation.New(0), 0, nil, 0)
			jusBefB := leafnodes.NewJustBeforeEachNode(func() {}, codelocation.New(0), 0, nil, 0)

			container.PushSetupNode(befA)
			container.PushSetupNode(befB)
			container.PushSetupNode(aftA)
			container.PushSetupNode(aftB)
			container.PushSetupNode(jusBefA)
			container.PushSetupNode(jusBefB)

			subject := leafnodes.NewItNode("subject", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0)
			container.PushSubjectNode(subject)

			Ω(container.SetupNodesOfType(types.SpecComponentTypeBeforeEach)).Should(Equal([]leafnodes.BasicNode{befA, befB}))
			Ω(container.SetupNodesOfType(types.SpecComponentTypeAfterEach)).Should(Equal([]leafnodes.BasicNode{aftA, aftB}))
			Ω(container.SetupNodesOfType(types.SpecComponentTypeJustBeforeEach)).Should(Equal([]leafnodes.BasicNode{jusBefA, jusBefB}))
			Ω(container.SetupNodesOfType(types.SpecComponentTypeIt)).Should(BeEmpty()) //subjects are not setup nodes
		})
	})

	Context("With appended containers and subject nodes", func() {
		var (
			itA, itB, innerItA, innerItB leafnodes.SubjectNode
			innerContainer               *ContainerNode
		)

		BeforeEach(func() {
			itA = leafnodes.NewItNode("Banana", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0)
			itB = leafnodes.NewItNode("Apple", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0)

			innerItA = leafnodes.NewItNode("inner A", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0)
			innerItB = leafnodes.NewItNode("inner B", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0)

			innerContainer = New("Orange", types.FlagTypeNone, codelocation.New(0))

			container.PushSubjectNode(itA)
			container.PushContainerNode(innerContainer)
			innerContainer.PushSubjectNode(innerItA)
			innerContainer.PushSubjectNode(innerItB)
			container.PushSubjectNode(itB)
		})

		Describe("Collating", func() {
			It("should return a collated set of containers and subject nodes in the correct order", func() {
				collated := container.Collate()
				Ω(collated).Should(HaveLen(4))

				Ω(collated[0]).Should(Equal(CollatedNodes{
					Containers: []*ContainerNode{container},
					Subject:    itA,
				}))

				Ω(collated[1]).Should(Equal(CollatedNodes{
					Containers: []*ContainerNode{container, innerContainer},
					Subject:    innerItA,
				}))

				Ω(collated[2]).Should(Equal(CollatedNodes{
					Containers: []*ContainerNode{container, innerContainer},
					Subject:    innerItB,
				}))

				Ω(collated[3]).Should(Equal(CollatedNodes{
					Containers: []*ContainerNode{container},
					Subject:    itB,
				}))
			})
		})

		Describe("Backpropagating Programmatic Focus", func() {
			//This allows inner focused specs to override the focus of outer focussed
			//specs and more closely maps to what a developer wants to happen
			//when debugging a test suite

			Context("when a parent is focused *and* an inner subject is focused", func() {
				BeforeEach(func() {
					container = New("description text", types.FlagTypeFocused, codeLocation)
					itA = leafnodes.NewItNode("A", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0)
					container.PushSubjectNode(itA)

					innerContainer = New("Orange", types.FlagTypeNone, codelocation.New(0))
					container.PushContainerNode(innerContainer)
					innerItA = leafnodes.NewItNode("inner A", func() {}, types.FlagTypeFocused, codelocation.New(0), 0, nil, 0)
					innerContainer.PushSubjectNode(innerItA)
				})

				It("should unfocus the parent", func() {
					container.BackPropagateProgrammaticFocus()

					Ω(container.Flag()).Should(Equal(types.FlagTypeNone))
					Ω(itA.Flag()).Should(Equal(types.FlagTypeNone))
					Ω(innerContainer.Flag()).Should(Equal(types.FlagTypeNone))
					Ω(innerItA.Flag()).Should(Equal(types.FlagTypeFocused))
				})
			})

			Context("when a parent is focused *and* an inner container is focused", func() {
				BeforeEach(func() {
					container = New("description text", types.FlagTypeFocused, codeLocation)
					itA = leafnodes.NewItNode("A", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0)
					container.PushSubjectNode(itA)

					innerContainer = New("Orange", types.FlagTypeFocused, codelocation.New(0))
					container.PushContainerNode(innerContainer)
					innerItA = leafnodes.NewItNode("inner A", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0)
					innerContainer.PushSubjectNode(innerItA)
				})

				It("should unfocus the parent", func() {
					container.BackPropagateProgrammaticFocus()

					Ω(container.Flag()).Should(Equal(types.FlagTypeNone))
					Ω(itA.Flag()).Should(Equal(types.FlagTypeNone))
					Ω(innerContainer.Flag()).Should(Equal(types.FlagTypeFocused))
					Ω(innerItA.Flag()).Should(Equal(types.FlagTypeNone))
				})
			})

			Context("when a parent is pending and a child is focused", func() {
				BeforeEach(func() {
					container = New("description text", types.FlagTypeFocused, codeLocation)
					itA = leafnodes.NewItNode("A", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0)
					container.PushSubjectNode(itA)

					innerContainer = New("Orange", types.FlagTypePending, codelocation.New(0))
					container.PushContainerNode(innerContainer)
					innerItA = leafnodes.NewItNode("inner A", func() {}, types.FlagTypeFocused, codelocation.New(0), 0, nil, 0)
					innerContainer.PushSubjectNode(innerItA)
				})

				It("should not do anything", func() {
					container.BackPropagateProgrammaticFocus()

					Ω(container.Flag()).Should(Equal(types.FlagTypeFocused))
					Ω(itA.Flag()).Should(Equal(types.FlagTypeNone))
					Ω(innerContainer.Flag()).Should(Equal(types.FlagTypePending))
					Ω(innerItA.Flag()).Should(Equal(types.FlagTypeFocused))
				})
			})
		})

		Describe("Shuffling", func() {
			var unshuffledCollation []CollatedNodes
			BeforeEach(func() {
				unshuffledCollation = container.Collate()

				r := rand.New(rand.NewSource(17))
				container.Shuffle(r)
			})

			It("should sort, and then shuffle, the top level contents of the container", func() {
				shuffledCollation := container.Collate()
				Ω(shuffledCollation).Should(HaveLen(len(unshuffledCollation)))
				Ω(shuffledCollation).ShouldNot(Equal(unshuffledCollation))

				for _, entry := range unshuffledCollation {
					Ω(shuffledCollation).Should(ContainElement(entry))
				}

				innerAIndex, innerBIndex := 0, 0
				for i, entry := range shuffledCollation {
					if entry.Subject == innerItA {
						innerAIndex = i
					} else if entry.Subject == innerItB {
						innerBIndex = i
					}
				}

				Ω(innerAIndex).Should(Equal(innerBIndex - 1))
			})
		})
	})
})
92
vendor/github.com/onsi/ginkgo/internal/failer/failer.go
generated
vendored
Normal file
92
vendor/github.com/onsi/ginkgo/internal/failer/failer.go
generated
vendored
Normal file
@ -0,0 +1,92 @@
package failer

import (
	"fmt"
	"sync"

	"github.com/onsi/ginkgo/types"
)

type Failer struct {
	lock    *sync.Mutex
	failure types.SpecFailure
	state   types.SpecState
}

func New() *Failer {
	return &Failer{
		lock:  &sync.Mutex{},
		state: types.SpecStatePassed,
	}
}

func (f *Failer) Panic(location types.CodeLocation, forwardedPanic interface{}) {
	f.lock.Lock()
	defer f.lock.Unlock()

	if f.state == types.SpecStatePassed {
		f.state = types.SpecStatePanicked
		f.failure = types.SpecFailure{
			Message:        "Test Panicked",
			Location:       location,
			ForwardedPanic: fmt.Sprintf("%v", forwardedPanic),
		}
	}
}

func (f *Failer) Timeout(location types.CodeLocation) {
	f.lock.Lock()
	defer f.lock.Unlock()

	if f.state == types.SpecStatePassed {
		f.state = types.SpecStateTimedOut
		f.failure = types.SpecFailure{
			Message:  "Timed out",
			Location: location,
		}
	}
}

func (f *Failer) Fail(message string, location types.CodeLocation) {
	f.lock.Lock()
	defer f.lock.Unlock()

	if f.state == types.SpecStatePassed {
		f.state = types.SpecStateFailed
		f.failure = types.SpecFailure{
			Message:  message,
			Location: location,
		}
	}
}

func (f *Failer) Drain(componentType types.SpecComponentType, componentIndex int, componentCodeLocation types.CodeLocation) (types.SpecFailure, types.SpecState) {
	f.lock.Lock()
	defer f.lock.Unlock()

	failure := f.failure
	outcome := f.state
	if outcome != types.SpecStatePassed {
		failure.ComponentType = componentType
		failure.ComponentIndex = componentIndex
		failure.ComponentCodeLocation = componentCodeLocation
	}

	f.state = types.SpecStatePassed
	f.failure = types.SpecFailure{}

	return failure, outcome
}

func (f *Failer) Skip(message string, location types.CodeLocation) {
	f.lock.Lock()
	defer f.lock.Unlock()

	if f.state == types.SpecStatePassed {
		f.state = types.SpecStateSkipped
		f.failure = types.SpecFailure{
			Message:  message,
			Location: location,
		}
	}
}
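Editor's note: the Failer above is first-write-wins: only the first Fail/Skip/Panic/Timeout since the last Drain is recorded, and Drain returns the stored failure annotated with the running component before resetting the state to passed, which is the contract exercised in failer_test.go below. A hedged sketch of that usage in the style of the _test package (it only builds inside the ginkgo tree, since these are internal packages; the component index 3 is an arbitrary example value):

```go
package failer_test

import (
	"fmt"

	. "github.com/onsi/ginkgo/internal/failer"

	"github.com/onsi/ginkgo/internal/codelocation"
	"github.com/onsi/ginkgo/types"
)

// ExampleFailer sketches the first-write-wins contract: the second Fail is
// ignored, and Drain both reports the stored failure and resets the state.
func ExampleFailer() {
	f := New()
	f.Fail("first failure", codelocation.New(0))
	f.Fail("second failure is ignored", codelocation.New(0))

	failure, state := f.Drain(types.SpecComponentTypeIt, 3, codelocation.New(0))
	fmt.Println(failure.Message, state == types.SpecStateFailed)
	// Output: first failure true
}
```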
13
vendor/github.com/onsi/ginkgo/internal/failer/failer_suite_test.go
generated
vendored
Normal file
13
vendor/github.com/onsi/ginkgo/internal/failer/failer_suite_test.go
generated
vendored
Normal file
@ -0,0 +1,13 @@
package failer_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"

	"testing"
)

func TestFailer(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Failer Suite")
}
141
vendor/github.com/onsi/ginkgo/internal/failer/failer_test.go
generated
vendored
Normal file
141
vendor/github.com/onsi/ginkgo/internal/failer/failer_test.go
generated
vendored
Normal file
@ -0,0 +1,141 @@
package failer_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/ginkgo/internal/failer"
	. "github.com/onsi/gomega"

	"github.com/onsi/ginkgo/internal/codelocation"
	"github.com/onsi/ginkgo/types"
)

var _ = Describe("Failer", func() {
	var (
		failer        *Failer
		codeLocationA types.CodeLocation
		codeLocationB types.CodeLocation
	)

	BeforeEach(func() {
		codeLocationA = codelocation.New(0)
		codeLocationB = codelocation.New(0)
		failer = New()
	})

	Context("with no failures", func() {
		It("should return success when drained", func() {
			failure, state := failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB)
			Ω(failure).Should(BeZero())
			Ω(state).Should(Equal(types.SpecStatePassed))
		})
	})

	Describe("Skip", func() {
		It("should handle failures", func() {
			failer.Skip("something skipped", codeLocationA)
			failure, state := failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB)
			Ω(failure).Should(Equal(types.SpecFailure{
				Message:               "something skipped",
				Location:              codeLocationA,
				ForwardedPanic:        "",
				ComponentType:         types.SpecComponentTypeIt,
				ComponentIndex:        3,
				ComponentCodeLocation: codeLocationB,
			}))
			Ω(state).Should(Equal(types.SpecStateSkipped))
		})
	})

	Describe("Fail", func() {
		It("should handle failures", func() {
			failer.Fail("something failed", codeLocationA)
			failure, state := failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB)
			Ω(failure).Should(Equal(types.SpecFailure{
				Message:               "something failed",
				Location:              codeLocationA,
				ForwardedPanic:        "",
				ComponentType:         types.SpecComponentTypeIt,
				ComponentIndex:        3,
				ComponentCodeLocation: codeLocationB,
			}))
			Ω(state).Should(Equal(types.SpecStateFailed))
		})
	})

	Describe("Panic", func() {
		It("should handle panics", func() {
			failer.Panic(codeLocationA, "some forwarded panic")
			failure, state := failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB)
			Ω(failure).Should(Equal(types.SpecFailure{
				Message:               "Test Panicked",
				Location:              codeLocationA,
				ForwardedPanic:        "some forwarded panic",
				ComponentType:         types.SpecComponentTypeIt,
				ComponentIndex:        3,
				ComponentCodeLocation: codeLocationB,
			}))
			Ω(state).Should(Equal(types.SpecStatePanicked))
		})
	})

	Describe("Timeout", func() {
		It("should handle timeouts", func() {
			failer.Timeout(codeLocationA)
			failure, state := failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB)
			Ω(failure).Should(Equal(types.SpecFailure{
				Message:               "Timed out",
				Location:              codeLocationA,
				ForwardedPanic:        "",
				ComponentType:         types.SpecComponentTypeIt,
				ComponentIndex:        3,
				ComponentCodeLocation: codeLocationB,
			}))
			Ω(state).Should(Equal(types.SpecStateTimedOut))
		})
	})

	Context("when multiple failures are registered", func() {
		BeforeEach(func() {
			failer.Fail("something failed", codeLocationA)
			failer.Fail("something else failed", codeLocationA)
		})

		It("should only report the first one when drained", func() {
			failure, state := failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB)

			Ω(failure).Should(Equal(types.SpecFailure{
				Message:               "something failed",
				Location:              codeLocationA,
				ForwardedPanic:        "",
				ComponentType:         types.SpecComponentTypeIt,
				ComponentIndex:        3,
				ComponentCodeLocation: codeLocationB,
			}))
			Ω(state).Should(Equal(types.SpecStateFailed))
		})

		It("should report subsequent failures after being drained", func() {
			failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB)
			failer.Fail("yet another thing failed", codeLocationA)

			failure, state := failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB)

			Ω(failure).Should(Equal(types.SpecFailure{
				Message:               "yet another thing failed",
				Location:              codeLocationA,
				ForwardedPanic:        "",
				ComponentType:         types.SpecComponentTypeIt,
				ComponentIndex:        3,
				ComponentCodeLocation: codeLocationB,
			}))
			Ω(state).Should(Equal(types.SpecStateFailed))
		})

		It("should report sucess on subsequent drains if no errors occur", func() {
			failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB)
			failure, state := failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB)
			Ω(failure).Should(BeZero())
			Ω(state).Should(Equal(types.SpecStatePassed))
		})
	})
})
103
vendor/github.com/onsi/ginkgo/internal/leafnodes/benchmarker.go
generated
vendored
Normal file
103
vendor/github.com/onsi/ginkgo/internal/leafnodes/benchmarker.go
generated
vendored
Normal file
@ -0,0 +1,103 @@
package leafnodes

import (
	"math"
	"time"

	"sync"

	"github.com/onsi/ginkgo/types"
)

type benchmarker struct {
	mu           sync.Mutex
	measurements map[string]*types.SpecMeasurement
	orderCounter int
}

func newBenchmarker() *benchmarker {
	return &benchmarker{
		measurements: make(map[string]*types.SpecMeasurement, 0),
	}
}

func (b *benchmarker) Time(name string, body func(), info ...interface{}) (elapsedTime time.Duration) {
	t := time.Now()
	body()
	elapsedTime = time.Since(t)

	b.mu.Lock()
	defer b.mu.Unlock()
	measurement := b.getMeasurement(name, "Fastest Time", "Slowest Time", "Average Time", "s", 3, info...)
	measurement.Results = append(measurement.Results, elapsedTime.Seconds())

	return
}

func (b *benchmarker) RecordValue(name string, value float64, info ...interface{}) {
	measurement := b.getMeasurement(name, "Smallest", " Largest", " Average", "", 3, info...)
	b.mu.Lock()
	defer b.mu.Unlock()
	measurement.Results = append(measurement.Results, value)
}

func (b *benchmarker) RecordValueWithPrecision(name string, value float64, units string, precision int, info ...interface{}) {
	measurement := b.getMeasurement(name, "Smallest", " Largest", " Average", units, precision, info...)
	b.mu.Lock()
	defer b.mu.Unlock()
	measurement.Results = append(measurement.Results, value)
}

func (b *benchmarker) getMeasurement(name string, smallestLabel string, largestLabel string, averageLabel string, units string, precision int, info ...interface{}) *types.SpecMeasurement {
	measurement, ok := b.measurements[name]
	if !ok {
		var computedInfo interface{}
		computedInfo = nil
		if len(info) > 0 {
			computedInfo = info[0]
		}
		measurement = &types.SpecMeasurement{
			Name:          name,
			Info:          computedInfo,
			Order:         b.orderCounter,
			SmallestLabel: smallestLabel,
			LargestLabel:  largestLabel,
			AverageLabel:  averageLabel,
			Units:         units,
			Precision:     precision,
			Results:       make([]float64, 0),
		}
		b.measurements[name] = measurement
		b.orderCounter++
	}

	return measurement
}

func (b *benchmarker) measurementsReport() map[string]*types.SpecMeasurement {
	b.mu.Lock()
	defer b.mu.Unlock()
	for _, measurement := range b.measurements {
		measurement.Smallest = math.MaxFloat64
		measurement.Largest = -math.MaxFloat64
		sum := float64(0)
		sumOfSquares := float64(0)

		for _, result := range measurement.Results {
			if result > measurement.Largest {
				measurement.Largest = result
			}
			if result < measurement.Smallest {
				measurement.Smallest = result
			}
			sum += result
			sumOfSquares += result * result
		}

		n := float64(len(measurement.Results))
		measurement.Average = sum / n
		measurement.StdDeviation = math.Sqrt(sumOfSquares/n - (sum/n)*(sum/n))
	}

	return b.measurements
}
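Editor's note: `measurementsReport` above computes a population standard deviation as sqrt(mean of squares minus square of the mean). A small standalone sketch (not part of the vendored diff) that reproduces the arithmetic for the `foo` results {7, 2, 3} asserted in measure_node_test.go below, giving an average of 4 and a deviation of about 2.16:

```go
package main

import (
	"fmt"
	"math"
)

// Reproduces the formula used in measurementsReport:
// stdDev = sqrt(E[x^2] - (E[x])^2), a population standard deviation.
func main() {
	results := []float64{7, 2, 3} // the "foo" values from measure_node_test.go
	sum, sumOfSquares := 0.0, 0.0
	for _, r := range results {
		sum += r
		sumOfSquares += r * r
	}
	n := float64(len(results))
	mean := sum / n
	stdDev := math.Sqrt(sumOfSquares/n - mean*mean)
	fmt.Printf("average %.2f, std deviation %.2f\n", mean, stdDev) // average 4.00, std deviation 2.16
}
```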
19
vendor/github.com/onsi/ginkgo/internal/leafnodes/interfaces.go
generated
vendored
Normal file
19
vendor/github.com/onsi/ginkgo/internal/leafnodes/interfaces.go
generated
vendored
Normal file
@ -0,0 +1,19 @@
package leafnodes

import (
	"github.com/onsi/ginkgo/types"
)

type BasicNode interface {
	Type() types.SpecComponentType
	Run() (types.SpecState, types.SpecFailure)
	CodeLocation() types.CodeLocation
}

type SubjectNode interface {
	BasicNode

	Text() string
	Flag() types.FlagType
	Samples() int
}
46
vendor/github.com/onsi/ginkgo/internal/leafnodes/it_node.go
generated
vendored
Normal file
46
vendor/github.com/onsi/ginkgo/internal/leafnodes/it_node.go
generated
vendored
Normal file
@ -0,0 +1,46 @@
package leafnodes

import (
	"github.com/onsi/ginkgo/internal/failer"
	"github.com/onsi/ginkgo/types"
	"time"
)

type ItNode struct {
	runner *runner

	flag types.FlagType
	text string
}

func NewItNode(text string, body interface{}, flag types.FlagType, codeLocation types.CodeLocation, timeout time.Duration, failer *failer.Failer, componentIndex int) *ItNode {
	return &ItNode{
		runner: newRunner(body, codeLocation, timeout, failer, types.SpecComponentTypeIt, componentIndex),
		flag:   flag,
		text:   text,
	}
}

func (node *ItNode) Run() (outcome types.SpecState, failure types.SpecFailure) {
	return node.runner.run()
}

func (node *ItNode) Type() types.SpecComponentType {
	return types.SpecComponentTypeIt
}

func (node *ItNode) Text() string {
	return node.text
}

func (node *ItNode) Flag() types.FlagType {
	return node.flag
}

func (node *ItNode) CodeLocation() types.CodeLocation {
	return node.runner.codeLocation
}

func (node *ItNode) Samples() int {
	return 1
}
22
vendor/github.com/onsi/ginkgo/internal/leafnodes/it_node_test.go
generated
vendored
Normal file
22
vendor/github.com/onsi/ginkgo/internal/leafnodes/it_node_test.go
generated
vendored
Normal file
@ -0,0 +1,22 @@
package leafnodes_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/ginkgo/internal/leafnodes"
	. "github.com/onsi/gomega"

	"github.com/onsi/ginkgo/internal/codelocation"
	"github.com/onsi/ginkgo/types"
)

var _ = Describe("It Nodes", func() {
	It("should report the correct type, text, flag, and code location", func() {
		codeLocation := codelocation.New(0)
		it := NewItNode("my it node", func() {}, types.FlagTypeFocused, codeLocation, 0, nil, 3)
		Ω(it.Type()).Should(Equal(types.SpecComponentTypeIt))
		Ω(it.Flag()).Should(Equal(types.FlagTypeFocused))
		Ω(it.Text()).Should(Equal("my it node"))
		Ω(it.CodeLocation()).Should(Equal(codeLocation))
		Ω(it.Samples()).Should(Equal(1))
	})
})
13
vendor/github.com/onsi/ginkgo/internal/leafnodes/leaf_node_suite_test.go
generated
vendored
Normal file
13
vendor/github.com/onsi/ginkgo/internal/leafnodes/leaf_node_suite_test.go
generated
vendored
Normal file
@ -0,0 +1,13 @@
package leafnodes_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"

	"testing"
)

func TestLeafNode(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "LeafNode Suite")
}
61
vendor/github.com/onsi/ginkgo/internal/leafnodes/measure_node.go
generated
vendored
Normal file
61
vendor/github.com/onsi/ginkgo/internal/leafnodes/measure_node.go
generated
vendored
Normal file
@ -0,0 +1,61 @@
package leafnodes

import (
	"github.com/onsi/ginkgo/internal/failer"
	"github.com/onsi/ginkgo/types"
	"reflect"
)

type MeasureNode struct {
	runner *runner

	text        string
	flag        types.FlagType
	samples     int
	benchmarker *benchmarker
}

func NewMeasureNode(text string, body interface{}, flag types.FlagType, codeLocation types.CodeLocation, samples int, failer *failer.Failer, componentIndex int) *MeasureNode {
	benchmarker := newBenchmarker()

	wrappedBody := func() {
		reflect.ValueOf(body).Call([]reflect.Value{reflect.ValueOf(benchmarker)})
	}

	return &MeasureNode{
		runner: newRunner(wrappedBody, codeLocation, 0, failer, types.SpecComponentTypeMeasure, componentIndex),

		text:        text,
		flag:        flag,
		samples:     samples,
		benchmarker: benchmarker,
	}
}

func (node *MeasureNode) Run() (outcome types.SpecState, failure types.SpecFailure) {
	return node.runner.run()
}

func (node *MeasureNode) MeasurementsReport() map[string]*types.SpecMeasurement {
	return node.benchmarker.measurementsReport()
}

func (node *MeasureNode) Type() types.SpecComponentType {
	return types.SpecComponentTypeMeasure
}

func (node *MeasureNode) Text() string {
	return node.text
}

func (node *MeasureNode) Flag() types.FlagType {
	return node.flag
}

func (node *MeasureNode) CodeLocation() types.CodeLocation {
	return node.runner.codeLocation
}

func (node *MeasureNode) Samples() int {
	return node.samples
}
154
vendor/github.com/onsi/ginkgo/internal/leafnodes/measure_node_test.go
generated
vendored
Normal file
154
vendor/github.com/onsi/ginkgo/internal/leafnodes/measure_node_test.go
generated
vendored
Normal file
@ -0,0 +1,154 @@
package leafnodes_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/ginkgo/internal/leafnodes"
	. "github.com/onsi/gomega"

	"github.com/onsi/ginkgo/internal/codelocation"
	Failer "github.com/onsi/ginkgo/internal/failer"
	"github.com/onsi/ginkgo/types"
	"time"
)

var _ = Describe("Measure Nodes", func() {
	It("should report the correct type, text, flag, and code location", func() {
		codeLocation := codelocation.New(0)
		measure := NewMeasureNode("my measure node", func(b Benchmarker) {}, types.FlagTypeFocused, codeLocation, 10, nil, 3)
		Ω(measure.Type()).Should(Equal(types.SpecComponentTypeMeasure))
		Ω(measure.Flag()).Should(Equal(types.FlagTypeFocused))
		Ω(measure.Text()).Should(Equal("my measure node"))
		Ω(measure.CodeLocation()).Should(Equal(codeLocation))
		Ω(measure.Samples()).Should(Equal(10))
	})

	Describe("benchmarking", func() {
		var measure *MeasureNode

		Describe("Value", func() {
			BeforeEach(func() {
				measure = NewMeasureNode("the measurement", func(b Benchmarker) {
					b.RecordValue("foo", 7, "info!")
					b.RecordValue("foo", 2)
					b.RecordValue("foo", 3)
					b.RecordValue("bar", 0.3)
					b.RecordValue("bar", 0.1)
					b.RecordValue("bar", 0.5)
					b.RecordValue("bar", 0.7)
				}, types.FlagTypeFocused, codelocation.New(0), 1, Failer.New(), 3)
				Ω(measure.Run()).Should(Equal(types.SpecStatePassed))
			})

			It("records passed in values and reports on them", func() {
				report := measure.MeasurementsReport()
				Ω(report).Should(HaveLen(2))
				Ω(report["foo"].Name).Should(Equal("foo"))
				Ω(report["foo"].Info).Should(Equal("info!"))
				Ω(report["foo"].Order).Should(Equal(0))
				Ω(report["foo"].SmallestLabel).Should(Equal("Smallest"))
				Ω(report["foo"].LargestLabel).Should(Equal(" Largest"))
				Ω(report["foo"].AverageLabel).Should(Equal(" Average"))
				Ω(report["foo"].Units).Should(Equal(""))
				Ω(report["foo"].Results).Should(Equal([]float64{7, 2, 3}))
				Ω(report["foo"].Smallest).Should(BeNumerically("==", 2))
				Ω(report["foo"].Largest).Should(BeNumerically("==", 7))
				Ω(report["foo"].Average).Should(BeNumerically("==", 4))
				Ω(report["foo"].StdDeviation).Should(BeNumerically("~", 2.16, 0.01))

				Ω(report["bar"].Name).Should(Equal("bar"))
				Ω(report["bar"].Info).Should(BeNil())
				Ω(report["bar"].SmallestLabel).Should(Equal("Smallest"))
				Ω(report["bar"].Order).Should(Equal(1))
				Ω(report["bar"].LargestLabel).Should(Equal(" Largest"))
				Ω(report["bar"].AverageLabel).Should(Equal(" Average"))
				Ω(report["bar"].Units).Should(Equal(""))
				Ω(report["bar"].Results).Should(Equal([]float64{0.3, 0.1, 0.5, 0.7}))
				Ω(report["bar"].Smallest).Should(BeNumerically("==", 0.1))
				Ω(report["bar"].Largest).Should(BeNumerically("==", 0.7))
				Ω(report["bar"].Average).Should(BeNumerically("==", 0.4))
				Ω(report["bar"].StdDeviation).Should(BeNumerically("~", 0.22, 0.01))
			})
		})

		Describe("Value with precision", func() {
			BeforeEach(func() {
				measure = NewMeasureNode("the measurement", func(b Benchmarker) {
					b.RecordValueWithPrecision("foo", 7, "ms", 7, "info!")
					b.RecordValueWithPrecision("foo", 2, "ms", 6)
					b.RecordValueWithPrecision("foo", 3, "ms", 5)
					b.RecordValueWithPrecision("bar", 0.3, "ns", 4)
					b.RecordValueWithPrecision("bar", 0.1, "ns", 3)
					b.RecordValueWithPrecision("bar", 0.5, "ns", 2)
					b.RecordValueWithPrecision("bar", 0.7, "ns", 1)
				}, types.FlagTypeFocused, codelocation.New(0), 1, Failer.New(), 3)
				Ω(measure.Run()).Should(Equal(types.SpecStatePassed))
			})

			It("records passed in values and reports on them", func() {
				report := measure.MeasurementsReport()
				Ω(report).Should(HaveLen(2))
				Ω(report["foo"].Name).Should(Equal("foo"))
				Ω(report["foo"].Info).Should(Equal("info!"))
				Ω(report["foo"].Order).Should(Equal(0))
				Ω(report["foo"].SmallestLabel).Should(Equal("Smallest"))
				Ω(report["foo"].LargestLabel).Should(Equal(" Largest"))
				Ω(report["foo"].AverageLabel).Should(Equal(" Average"))
				Ω(report["foo"].Units).Should(Equal("ms"))
				Ω(report["foo"].Results).Should(Equal([]float64{7, 2, 3}))
				Ω(report["foo"].Smallest).Should(BeNumerically("==", 2))
				Ω(report["foo"].Largest).Should(BeNumerically("==", 7))
				Ω(report["foo"].Average).Should(BeNumerically("==", 4))
				Ω(report["foo"].StdDeviation).Should(BeNumerically("~", 2.16, 0.01))

				Ω(report["bar"].Name).Should(Equal("bar"))
				Ω(report["bar"].Info).Should(BeNil())
				Ω(report["bar"].SmallestLabel).Should(Equal("Smallest"))
				Ω(report["bar"].Order).Should(Equal(1))
				Ω(report["bar"].LargestLabel).Should(Equal(" Largest"))
				Ω(report["bar"].AverageLabel).Should(Equal(" Average"))
				Ω(report["bar"].Units).Should(Equal("ns"))
				Ω(report["bar"].Results).Should(Equal([]float64{0.3, 0.1, 0.5, 0.7}))
				Ω(report["bar"].Smallest).Should(BeNumerically("==", 0.1))
				Ω(report["bar"].Largest).Should(BeNumerically("==", 0.7))
				Ω(report["bar"].Average).Should(BeNumerically("==", 0.4))
				Ω(report["bar"].StdDeviation).Should(BeNumerically("~", 0.22, 0.01))
			})
		})

		Describe("Time", func() {
			BeforeEach(func() {
				measure = NewMeasureNode("the measurement", func(b Benchmarker) {
					b.Time("foo", func() {
						time.Sleep(100 * time.Millisecond)
					}, "info!")
					b.Time("foo", func() {
						time.Sleep(200 * time.Millisecond)
					})
					b.Time("foo", func() {
						time.Sleep(170 * time.Millisecond)
					})
				}, types.FlagTypeFocused, codelocation.New(0), 1, Failer.New(), 3)
				Ω(measure.Run()).Should(Equal(types.SpecStatePassed))
			})

			It("records passed in values and reports on them", func() {
				report := measure.MeasurementsReport()
				Ω(report).Should(HaveLen(1))
				Ω(report["foo"].Name).Should(Equal("foo"))
				Ω(report["foo"].Info).Should(Equal("info!"))
				Ω(report["foo"].SmallestLabel).Should(Equal("Fastest Time"))
				Ω(report["foo"].LargestLabel).Should(Equal("Slowest Time"))
				Ω(report["foo"].AverageLabel).Should(Equal("Average Time"))
				Ω(report["foo"].Units).Should(Equal("s"))
				Ω(report["foo"].Results).Should(HaveLen(3))
				Ω(report["foo"].Results[0]).Should(BeNumerically("~", 0.1, 0.01))
				Ω(report["foo"].Results[1]).Should(BeNumerically("~", 0.2, 0.01))
				Ω(report["foo"].Results[2]).Should(BeNumerically("~", 0.17, 0.01))
				Ω(report["foo"].Smallest).Should(BeNumerically("~", 0.1, 0.01))
				Ω(report["foo"].Largest).Should(BeNumerically("~", 0.2, 0.01))
				Ω(report["foo"].Average).Should(BeNumerically("~", 0.16, 0.01))
				Ω(report["foo"].StdDeviation).Should(BeNumerically("~", 0.04, 0.01))
			})
		})
	})
})
113
vendor/github.com/onsi/ginkgo/internal/leafnodes/runner.go
generated
vendored
Normal file
113
vendor/github.com/onsi/ginkgo/internal/leafnodes/runner.go
generated
vendored
Normal file
@ -0,0 +1,113 @@
package leafnodes

import (
	"fmt"
	"github.com/onsi/ginkgo/internal/codelocation"
	"github.com/onsi/ginkgo/internal/failer"
	"github.com/onsi/ginkgo/types"
	"reflect"
	"time"
)

type runner struct {
	isAsync          bool
	asyncFunc        func(chan<- interface{})
	syncFunc         func()
	codeLocation     types.CodeLocation
	timeoutThreshold time.Duration
	nodeType         types.SpecComponentType
	componentIndex   int
	failer           *failer.Failer
}

func newRunner(body interface{}, codeLocation types.CodeLocation, timeout time.Duration, failer *failer.Failer, nodeType types.SpecComponentType, componentIndex int) *runner {
	bodyType := reflect.TypeOf(body)
	if bodyType.Kind() != reflect.Func {
		panic(fmt.Sprintf("Expected a function but got something else at %v", codeLocation))
	}

	runner := &runner{
		codeLocation:     codeLocation,
		timeoutThreshold: timeout,
		failer:           failer,
		nodeType:         nodeType,
		componentIndex:   componentIndex,
	}

	switch bodyType.NumIn() {
	case 0:
		runner.syncFunc = body.(func())
		return runner
	case 1:
		if !(bodyType.In(0).Kind() == reflect.Chan && bodyType.In(0).Elem().Kind() == reflect.Interface) {
			panic(fmt.Sprintf("Must pass a Done channel to function at %v", codeLocation))
		}

		wrappedBody := func(done chan<- interface{}) {
			bodyValue := reflect.ValueOf(body)
			bodyValue.Call([]reflect.Value{reflect.ValueOf(done)})
		}

		runner.isAsync = true
		runner.asyncFunc = wrappedBody
		return runner
	}

	panic(fmt.Sprintf("Too many arguments to function at %v", codeLocation))
}

func (r *runner) run() (outcome types.SpecState, failure types.SpecFailure) {
	if r.isAsync {
		return r.runAsync()
	} else {
		return r.runSync()
	}
}

func (r *runner) runAsync() (outcome types.SpecState, failure types.SpecFailure) {
	done := make(chan interface{}, 1)

	go func() {
		finished := false

		defer func() {
			if e := recover(); e != nil || !finished {
				r.failer.Panic(codelocation.New(2), e)
				select {
				case <-done:
					break
				default:
					close(done)
				}
			}
		}()

		r.asyncFunc(done)
		finished = true
	}()

	select {
	case <-done:
	case <-time.After(r.timeoutThreshold):
		r.failer.Timeout(r.codeLocation)
	}

	failure, outcome = r.failer.Drain(r.nodeType, r.componentIndex, r.codeLocation)
	return
}
func (r *runner) runSync() (outcome types.SpecState, failure types.SpecFailure) {
	finished := false

	defer func() {
		if e := recover(); e != nil || !finished {
			r.failer.Panic(codelocation.New(2), e)
		}

		failure, outcome = r.failer.Drain(r.nodeType, r.componentIndex, r.codeLocation)
	}()

	r.syncFunc()
	finished = true

	return
}
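Editor's note: `newRunner` above dispatches on the body's signature via reflection: a niladic function runs synchronously, while a function taking a single channel-of-interface{} parameter (Ginkgo's `Done`) runs asynchronously in a goroutine with a timeout. The following is a minimal standalone sketch of that signature check only, not the vendored code; `classify` and its messages are illustrative names.

```go
package main

import (
	"fmt"
	"reflect"
)

// classify mirrors newRunner's dispatch: a niladic func is synchronous, a func
// taking a single chan-of-interface{} (Ginkgo's Done) is asynchronous, and
// anything else is rejected.
func classify(body interface{}) (string, error) {
	t := reflect.TypeOf(body)
	if t == nil || t.Kind() != reflect.Func {
		return "", fmt.Errorf("expected a function")
	}
	switch t.NumIn() {
	case 0:
		return "synchronous", nil
	case 1:
		if t.In(0).Kind() == reflect.Chan && t.In(0).Elem().Kind() == reflect.Interface {
			return "asynchronous", nil
		}
		return "", fmt.Errorf("single argument must be a Done channel")
	}
	return "", fmt.Errorf("too many arguments")
}

func main() {
	fmt.Println(classify(func() {}))                       // synchronous <nil>
	fmt.Println(classify(func(done chan interface{}) {})) // asynchronous <nil>
}
```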
41
vendor/github.com/onsi/ginkgo/internal/leafnodes/setup_nodes.go
generated
vendored
Normal file
41
vendor/github.com/onsi/ginkgo/internal/leafnodes/setup_nodes.go
generated
vendored
Normal file
@ -0,0 +1,41 @@
package leafnodes

import (
	"github.com/onsi/ginkgo/internal/failer"
	"github.com/onsi/ginkgo/types"
	"time"
)

type SetupNode struct {
	runner *runner
}

func (node *SetupNode) Run() (outcome types.SpecState, failure types.SpecFailure) {
	return node.runner.run()
}

func (node *SetupNode) Type() types.SpecComponentType {
	return node.runner.nodeType
}

func (node *SetupNode) CodeLocation() types.CodeLocation {
	return node.runner.codeLocation
}

func NewBeforeEachNode(body interface{}, codeLocation types.CodeLocation, timeout time.Duration, failer *failer.Failer, componentIndex int) *SetupNode {
	return &SetupNode{
		runner: newRunner(body, codeLocation, timeout, failer, types.SpecComponentTypeBeforeEach, componentIndex),
	}
}

func NewAfterEachNode(body interface{}, codeLocation types.CodeLocation, timeout time.Duration, failer *failer.Failer, componentIndex int) *SetupNode {
	return &SetupNode{
		runner: newRunner(body, codeLocation, timeout, failer, types.SpecComponentTypeAfterEach, componentIndex),
	}
}

func NewJustBeforeEachNode(body interface{}, codeLocation types.CodeLocation, timeout time.Duration, failer *failer.Failer, componentIndex int) *SetupNode {
	return &SetupNode{
		runner: newRunner(body, codeLocation, timeout, failer, types.SpecComponentTypeJustBeforeEach, componentIndex),
	}
}
40
vendor/github.com/onsi/ginkgo/internal/leafnodes/setup_nodes_test.go
generated
vendored
Normal file
40
vendor/github.com/onsi/ginkgo/internal/leafnodes/setup_nodes_test.go
generated
vendored
Normal file
@ -0,0 +1,40 @@
package leafnodes_test

import (
	. "github.com/onsi/ginkgo"
	"github.com/onsi/ginkgo/types"
	. "github.com/onsi/gomega"

	. "github.com/onsi/ginkgo/internal/leafnodes"

	"github.com/onsi/ginkgo/internal/codelocation"
)

var _ = Describe("Setup Nodes", func() {
	Describe("BeforeEachNodes", func() {
		It("should report the correct type and code location", func() {
			codeLocation := codelocation.New(0)
			beforeEach := NewBeforeEachNode(func() {}, codeLocation, 0, nil, 3)
			Ω(beforeEach.Type()).Should(Equal(types.SpecComponentTypeBeforeEach))
			Ω(beforeEach.CodeLocation()).Should(Equal(codeLocation))
		})
	})

	Describe("AfterEachNodes", func() {
		It("should report the correct type and code location", func() {
			codeLocation := codelocation.New(0)
			afterEach := NewAfterEachNode(func() {}, codeLocation, 0, nil, 3)
			Ω(afterEach.Type()).Should(Equal(types.SpecComponentTypeAfterEach))
			Ω(afterEach.CodeLocation()).Should(Equal(codeLocation))
		})
	})

	Describe("JustBeforeEachNodes", func() {
		It("should report the correct type and code location", func() {
			codeLocation := codelocation.New(0)
			justBeforeEach := NewJustBeforeEachNode(func() {}, codeLocation, 0, nil, 3)
			Ω(justBeforeEach.Type()).Should(Equal(types.SpecComponentTypeJustBeforeEach))
			Ω(justBeforeEach.CodeLocation()).Should(Equal(codeLocation))
		})
	})
})
359
vendor/github.com/onsi/ginkgo/internal/leafnodes/shared_runner_test.go
generated
vendored
Normal file
359
vendor/github.com/onsi/ginkgo/internal/leafnodes/shared_runner_test.go
generated
vendored
Normal file
@ -0,0 +1,359 @@
package leafnodes_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/ginkgo/internal/leafnodes"
	. "github.com/onsi/gomega"

	"reflect"
	"runtime"
	"time"

	"github.com/onsi/ginkgo/internal/codelocation"
	Failer "github.com/onsi/ginkgo/internal/failer"
	"github.com/onsi/ginkgo/types"
)

type runnable interface {
	Run() (outcome types.SpecState, failure types.SpecFailure)
	CodeLocation() types.CodeLocation
}

func SynchronousSharedRunnerBehaviors(build func(body interface{}, timeout time.Duration, failer *Failer.Failer, componentCodeLocation types.CodeLocation) runnable, componentType types.SpecComponentType, componentIndex int) {
	var (
		outcome types.SpecState
		failure types.SpecFailure

		failer *Failer.Failer

		componentCodeLocation types.CodeLocation
		innerCodeLocation     types.CodeLocation

		didRun bool
	)

	BeforeEach(func() {
		failer = Failer.New()
		componentCodeLocation = codelocation.New(0)
		innerCodeLocation = codelocation.New(0)

		didRun = false
	})

	Describe("synchronous functions", func() {
		Context("when the function passes", func() {
			BeforeEach(func() {
				outcome, failure = build(func() {
					didRun = true
				}, 0, failer, componentCodeLocation).Run()
			})

			It("should have a succesful outcome", func() {
				Ω(didRun).Should(BeTrue())

				Ω(outcome).Should(Equal(types.SpecStatePassed))
				Ω(failure).Should(BeZero())
			})
		})

		Context("when a failure occurs", func() {
			BeforeEach(func() {
				outcome, failure = build(func() {
					didRun = true
					failer.Fail("bam", innerCodeLocation)
					panic("should not matter")
				}, 0, failer, componentCodeLocation).Run()
			})

			It("should return the failure", func() {
				Ω(didRun).Should(BeTrue())

				Ω(outcome).Should(Equal(types.SpecStateFailed))
				Ω(failure).Should(Equal(types.SpecFailure{
					Message:               "bam",
					Location:              innerCodeLocation,
					ForwardedPanic:        "",
					ComponentIndex:        componentIndex,
					ComponentType:         componentType,
					ComponentCodeLocation: componentCodeLocation,
				}))
			})
		})

		Context("when a panic occurs", func() {
			BeforeEach(func() {
				outcome, failure = build(func() {
					didRun = true
					innerCodeLocation = codelocation.New(0)
					panic("ack!")
				}, 0, failer, componentCodeLocation).Run()
			})

			It("should return the panic", func() {
				Ω(didRun).Should(BeTrue())

				Ω(outcome).Should(Equal(types.SpecStatePanicked))
				Ω(failure.ForwardedPanic).Should(Equal("ack!"))
			})
		})

		Context("when a panic occurs with a nil value", func() {
			BeforeEach(func() {
				outcome, failure = build(func() {
					didRun = true
					innerCodeLocation = codelocation.New(0)
					panic(nil)
				}, 0, failer, componentCodeLocation).Run()
			})

			It("should return the nil-valued panic", func() {
				Ω(didRun).Should(BeTrue())

				Ω(outcome).Should(Equal(types.SpecStatePanicked))
				Ω(failure.ForwardedPanic).Should(Equal("<nil>"))
			})
		})

	})
}

func AsynchronousSharedRunnerBehaviors(build func(body interface{}, timeout time.Duration, failer *Failer.Failer, componentCodeLocation types.CodeLocation) runnable, componentType types.SpecComponentType, componentIndex int) {
	var (
		outcome types.SpecState
		failure types.SpecFailure

		failer *Failer.Failer

		componentCodeLocation types.CodeLocation
		innerCodeLocation     types.CodeLocation

		didRun bool
	)

	BeforeEach(func() {
		failer = Failer.New()
		componentCodeLocation = codelocation.New(0)
		innerCodeLocation = codelocation.New(0)

		didRun = false
	})

	Describe("asynchronous functions", func() {
		var timeoutDuration time.Duration

		BeforeEach(func() {
			timeoutDuration = time.Duration(1 * float64(time.Second))
		})

		Context("when running", func() {
			It("should run the function as a goroutine, and block until it's done", func() {
				initialNumberOfGoRoutines := runtime.NumGoroutine()
				numberOfGoRoutines := 0

				build(func(done Done) {
					didRun = true
					numberOfGoRoutines = runtime.NumGoroutine()
					close(done)
				}, timeoutDuration, failer, componentCodeLocation).Run()

				Ω(didRun).Should(BeTrue())
				Ω(numberOfGoRoutines).Should(BeNumerically(">=", initialNumberOfGoRoutines+1))
			})
		})

		Context("when the function passes", func() {
			BeforeEach(func() {
				outcome, failure = build(func(done Done) {
					didRun = true
					close(done)
				}, timeoutDuration, failer, componentCodeLocation).Run()
			})

			It("should have a succesful outcome", func() {
				Ω(didRun).Should(BeTrue())
				Ω(outcome).Should(Equal(types.SpecStatePassed))
				Ω(failure).Should(BeZero())
			})
		})

		Context("when the function fails", func() {
			BeforeEach(func() {
				outcome, failure = build(func(done Done) {
					didRun = true
					failer.Fail("bam", innerCodeLocation)
					time.Sleep(20 * time.Millisecond)
					panic("doesn't matter")
					close(done)
				}, 10*time.Millisecond, failer, componentCodeLocation).Run()
			})

			It("should return the failure", func() {
				Ω(didRun).Should(BeTrue())
|
||||||
|
Ω(outcome).Should(Equal(types.SpecStateFailed))
|
||||||
|
Ω(failure).Should(Equal(types.SpecFailure{
|
||||||
|
Message: "bam",
|
||||||
|
Location: innerCodeLocation,
|
||||||
|
ForwardedPanic: "",
|
||||||
|
ComponentIndex: componentIndex,
|
||||||
|
ComponentType: componentType,
|
||||||
|
ComponentCodeLocation: componentCodeLocation,
|
||||||
|
}))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when the function times out", func() {
|
||||||
|
var guard chan struct{}
|
||||||
|
|
||||||
|
BeforeEach(func() {
|
||||||
|
guard = make(chan struct{})
|
||||||
|
outcome, failure = build(func(done Done) {
|
||||||
|
didRun = true
|
||||||
|
time.Sleep(20 * time.Millisecond)
|
||||||
|
close(guard)
|
||||||
|
panic("doesn't matter")
|
||||||
|
close(done)
|
||||||
|
}, 10*time.Millisecond, failer, componentCodeLocation).Run()
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should return the timeout", func() {
|
||||||
|
<-guard
|
||||||
|
Ω(didRun).Should(BeTrue())
|
||||||
|
|
||||||
|
Ω(outcome).Should(Equal(types.SpecStateTimedOut))
|
||||||
|
Ω(failure).Should(Equal(types.SpecFailure{
|
||||||
|
Message: "Timed out",
|
||||||
|
Location: componentCodeLocation,
|
||||||
|
ForwardedPanic: "",
|
||||||
|
ComponentIndex: componentIndex,
|
||||||
|
ComponentType: componentType,
|
||||||
|
ComponentCodeLocation: componentCodeLocation,
|
||||||
|
}))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when the function panics", func() {
|
||||||
|
BeforeEach(func() {
|
||||||
|
outcome, failure = build(func(done Done) {
|
||||||
|
didRun = true
|
||||||
|
innerCodeLocation = codelocation.New(0)
|
||||||
|
panic("ack!")
|
||||||
|
}, 100*time.Millisecond, failer, componentCodeLocation).Run()
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should return the panic", func() {
|
||||||
|
Ω(didRun).Should(BeTrue())
|
||||||
|
|
||||||
|
Ω(outcome).Should(Equal(types.SpecStatePanicked))
|
||||||
|
Ω(failure.ForwardedPanic).Should(Equal("ack!"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when the function panics with a nil value", func() {
|
||||||
|
BeforeEach(func() {
|
||||||
|
outcome, failure = build(func(done Done) {
|
||||||
|
didRun = true
|
||||||
|
innerCodeLocation = codelocation.New(0)
|
||||||
|
panic(nil)
|
||||||
|
}, 100*time.Millisecond, failer, componentCodeLocation).Run()
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should return the nil-valued panic", func() {
|
||||||
|
Ω(didRun).Should(BeTrue())
|
||||||
|
|
||||||
|
Ω(outcome).Should(Equal(types.SpecStatePanicked))
|
||||||
|
Ω(failure.ForwardedPanic).Should(Equal("<nil>"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func InvalidSharedRunnerBehaviors(build func(body interface{}, timeout time.Duration, failer *Failer.Failer, componentCodeLocation types.CodeLocation) runnable, componentType types.SpecComponentType) {
|
||||||
|
var (
|
||||||
|
failer *Failer.Failer
|
||||||
|
componentCodeLocation types.CodeLocation
|
||||||
|
)
|
||||||
|
|
||||||
|
BeforeEach(func() {
|
||||||
|
failer = Failer.New()
|
||||||
|
componentCodeLocation = codelocation.New(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("invalid functions", func() {
|
||||||
|
Context("when passed something that's not a function", func() {
|
||||||
|
It("should panic", func() {
|
||||||
|
Ω(func() {
|
||||||
|
build("not a function", 0, failer, componentCodeLocation)
|
||||||
|
}).Should(Panic())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when the function takes the wrong kind of argument", func() {
|
||||||
|
It("should panic", func() {
|
||||||
|
Ω(func() {
|
||||||
|
build(func(oops string) {}, 0, failer, componentCodeLocation)
|
||||||
|
}).Should(Panic())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when the function takes more than one argument", func() {
|
||||||
|
It("should panic", func() {
|
||||||
|
Ω(func() {
|
||||||
|
build(func(done Done, oops string) {}, 0, failer, componentCodeLocation)
|
||||||
|
}).Should(Panic())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
var _ = Describe("Shared RunnableNode behavior", func() {
|
||||||
|
Describe("It Nodes", func() {
|
||||||
|
build := func(body interface{}, timeout time.Duration, failer *Failer.Failer, componentCodeLocation types.CodeLocation) runnable {
|
||||||
|
return NewItNode("", body, types.FlagTypeFocused, componentCodeLocation, timeout, failer, 3)
|
||||||
|
}
|
||||||
|
|
||||||
|
SynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeIt, 3)
|
||||||
|
AsynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeIt, 3)
|
||||||
|
InvalidSharedRunnerBehaviors(build, types.SpecComponentTypeIt)
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("Measure Nodes", func() {
|
||||||
|
build := func(body interface{}, _ time.Duration, failer *Failer.Failer, componentCodeLocation types.CodeLocation) runnable {
|
||||||
|
return NewMeasureNode("", func(Benchmarker) {
|
||||||
|
reflect.ValueOf(body).Call([]reflect.Value{})
|
||||||
|
}, types.FlagTypeFocused, componentCodeLocation, 10, failer, 3)
|
||||||
|
}
|
||||||
|
|
||||||
|
SynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeMeasure, 3)
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("BeforeEach Nodes", func() {
|
||||||
|
build := func(body interface{}, timeout time.Duration, failer *Failer.Failer, componentCodeLocation types.CodeLocation) runnable {
|
||||||
|
return NewBeforeEachNode(body, componentCodeLocation, timeout, failer, 3)
|
||||||
|
}
|
||||||
|
|
||||||
|
SynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeBeforeEach, 3)
|
||||||
|
AsynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeBeforeEach, 3)
|
||||||
|
InvalidSharedRunnerBehaviors(build, types.SpecComponentTypeBeforeEach)
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("AfterEach Nodes", func() {
|
||||||
|
build := func(body interface{}, timeout time.Duration, failer *Failer.Failer, componentCodeLocation types.CodeLocation) runnable {
|
||||||
|
return NewAfterEachNode(body, componentCodeLocation, timeout, failer, 3)
|
||||||
|
}
|
||||||
|
|
||||||
|
SynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeAfterEach, 3)
|
||||||
|
AsynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeAfterEach, 3)
|
||||||
|
InvalidSharedRunnerBehaviors(build, types.SpecComponentTypeAfterEach)
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("JustBeforeEach Nodes", func() {
|
||||||
|
build := func(body interface{}, timeout time.Duration, failer *Failer.Failer, componentCodeLocation types.CodeLocation) runnable {
|
||||||
|
return NewJustBeforeEachNode(body, componentCodeLocation, timeout, failer, 3)
|
||||||
|
}
|
||||||
|
|
||||||
|
SynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeJustBeforeEach, 3)
|
||||||
|
AsynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeJustBeforeEach, 3)
|
||||||
|
InvalidSharedRunnerBehaviors(build, types.SpecComponentTypeJustBeforeEach)
|
||||||
|
})
|
||||||
|
})
|
54
vendor/github.com/onsi/ginkgo/internal/leafnodes/suite_nodes.go
generated
vendored
Normal file
@ -0,0 +1,54 @@
package leafnodes

import (
	"github.com/onsi/ginkgo/internal/failer"
	"github.com/onsi/ginkgo/types"
	"time"
)

type SuiteNode interface {
	Run(parallelNode int, parallelTotal int, syncHost string) bool
	Passed() bool
	Summary() *types.SetupSummary
}

type simpleSuiteNode struct {
	runner  *runner
	outcome types.SpecState
	failure types.SpecFailure
	runTime time.Duration
}

func (node *simpleSuiteNode) Run(parallelNode int, parallelTotal int, syncHost string) bool {
	t := time.Now()
	node.outcome, node.failure = node.runner.run()
	node.runTime = time.Since(t)

	return node.outcome == types.SpecStatePassed
}

func (node *simpleSuiteNode) Passed() bool {
	return node.outcome == types.SpecStatePassed
}

func (node *simpleSuiteNode) Summary() *types.SetupSummary {
	return &types.SetupSummary{
		ComponentType: node.runner.nodeType,
		CodeLocation:  node.runner.codeLocation,
		State:         node.outcome,
		RunTime:       node.runTime,
		Failure:       node.failure,
	}
}

func NewBeforeSuiteNode(body interface{}, codeLocation types.CodeLocation, timeout time.Duration, failer *failer.Failer) SuiteNode {
	return &simpleSuiteNode{
		runner: newRunner(body, codeLocation, timeout, failer, types.SpecComponentTypeBeforeSuite, 0),
	}
}

func NewAfterSuiteNode(body interface{}, codeLocation types.CodeLocation, timeout time.Duration, failer *failer.Failer) SuiteNode {
	return &simpleSuiteNode{
		runner: newRunner(body, codeLocation, timeout, failer, types.SpecComponentTypeAfterSuite, 0),
	}
}
230
vendor/github.com/onsi/ginkgo/internal/leafnodes/suite_nodes_test.go
generated
vendored
Normal file
@ -0,0 +1,230 @@
package leafnodes_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"

	. "github.com/onsi/ginkgo/internal/leafnodes"

	"time"

	"github.com/onsi/ginkgo/internal/codelocation"
	Failer "github.com/onsi/ginkgo/internal/failer"
	"github.com/onsi/ginkgo/types"
)

var _ = Describe("SuiteNodes", func() {
	Describe("BeforeSuite nodes", func() {
		var befSuite SuiteNode
		var failer *Failer.Failer
		var codeLocation types.CodeLocation
		var innerCodeLocation types.CodeLocation
		var outcome bool

		BeforeEach(func() {
			failer = Failer.New()
			codeLocation = codelocation.New(0)
			innerCodeLocation = codelocation.New(0)
		})

		Context("when the body passes", func() {
			BeforeEach(func() {
				befSuite = NewBeforeSuiteNode(func() {
					time.Sleep(10 * time.Millisecond)
				}, codeLocation, 0, failer)
				outcome = befSuite.Run(0, 0, "")
			})

			It("should return true when run and report as passed", func() {
				Ω(outcome).Should(BeTrue())
				Ω(befSuite.Passed()).Should(BeTrue())
			})

			It("should have the correct summary", func() {
				summary := befSuite.Summary()
				Ω(summary.ComponentType).Should(Equal(types.SpecComponentTypeBeforeSuite))
				Ω(summary.CodeLocation).Should(Equal(codeLocation))
				Ω(summary.State).Should(Equal(types.SpecStatePassed))
				Ω(summary.RunTime).Should(BeNumerically(">=", 10*time.Millisecond))
				Ω(summary.Failure).Should(BeZero())
			})
		})

		Context("when the body fails", func() {
			BeforeEach(func() {
				befSuite = NewBeforeSuiteNode(func() {
					failer.Fail("oops", innerCodeLocation)
				}, codeLocation, 0, failer)
				outcome = befSuite.Run(0, 0, "")
			})

			It("should return false when run and report as failed", func() {
				Ω(outcome).Should(BeFalse())
				Ω(befSuite.Passed()).Should(BeFalse())
			})

			It("should have the correct summary", func() {
				summary := befSuite.Summary()
				Ω(summary.State).Should(Equal(types.SpecStateFailed))
				Ω(summary.Failure.Message).Should(Equal("oops"))
				Ω(summary.Failure.Location).Should(Equal(innerCodeLocation))
				Ω(summary.Failure.ForwardedPanic).Should(BeEmpty())
				Ω(summary.Failure.ComponentIndex).Should(Equal(0))
				Ω(summary.Failure.ComponentType).Should(Equal(types.SpecComponentTypeBeforeSuite))
				Ω(summary.Failure.ComponentCodeLocation).Should(Equal(codeLocation))
			})
		})

		Context("when the body times out", func() {
			BeforeEach(func() {
				befSuite = NewBeforeSuiteNode(func(done Done) {
				}, codeLocation, time.Millisecond, failer)
				outcome = befSuite.Run(0, 0, "")
			})

			It("should return false when run and report as failed", func() {
				Ω(outcome).Should(BeFalse())
				Ω(befSuite.Passed()).Should(BeFalse())
			})

			It("should have the correct summary", func() {
				summary := befSuite.Summary()
				Ω(summary.State).Should(Equal(types.SpecStateTimedOut))
				Ω(summary.Failure.ForwardedPanic).Should(BeEmpty())
				Ω(summary.Failure.ComponentIndex).Should(Equal(0))
				Ω(summary.Failure.ComponentType).Should(Equal(types.SpecComponentTypeBeforeSuite))
				Ω(summary.Failure.ComponentCodeLocation).Should(Equal(codeLocation))
			})
		})

		Context("when the body panics", func() {
			BeforeEach(func() {
				befSuite = NewBeforeSuiteNode(func() {
					panic("bam")
				}, codeLocation, 0, failer)
				outcome = befSuite.Run(0, 0, "")
			})

			It("should return false when run and report as failed", func() {
				Ω(outcome).Should(BeFalse())
				Ω(befSuite.Passed()).Should(BeFalse())
			})

			It("should have the correct summary", func() {
				summary := befSuite.Summary()
				Ω(summary.State).Should(Equal(types.SpecStatePanicked))
				Ω(summary.Failure.ForwardedPanic).Should(Equal("bam"))
				Ω(summary.Failure.ComponentIndex).Should(Equal(0))
				Ω(summary.Failure.ComponentType).Should(Equal(types.SpecComponentTypeBeforeSuite))
				Ω(summary.Failure.ComponentCodeLocation).Should(Equal(codeLocation))
			})
		})
	})

	Describe("AfterSuite nodes", func() {
		var aftSuite SuiteNode
		var failer *Failer.Failer
		var codeLocation types.CodeLocation
		var innerCodeLocation types.CodeLocation
		var outcome bool

		BeforeEach(func() {
			failer = Failer.New()
			codeLocation = codelocation.New(0)
			innerCodeLocation = codelocation.New(0)
		})

		Context("when the body passes", func() {
			BeforeEach(func() {
				aftSuite = NewAfterSuiteNode(func() {
					time.Sleep(10 * time.Millisecond)
				}, codeLocation, 0, failer)
				outcome = aftSuite.Run(0, 0, "")
			})

			It("should return true when run and report as passed", func() {
				Ω(outcome).Should(BeTrue())
				Ω(aftSuite.Passed()).Should(BeTrue())
			})

			It("should have the correct summary", func() {
				summary := aftSuite.Summary()
				Ω(summary.ComponentType).Should(Equal(types.SpecComponentTypeAfterSuite))
				Ω(summary.CodeLocation).Should(Equal(codeLocation))
				Ω(summary.State).Should(Equal(types.SpecStatePassed))
				Ω(summary.RunTime).Should(BeNumerically(">=", 10*time.Millisecond))
				Ω(summary.Failure).Should(BeZero())
			})
		})

		Context("when the body fails", func() {
			BeforeEach(func() {
				aftSuite = NewAfterSuiteNode(func() {
					failer.Fail("oops", innerCodeLocation)
				}, codeLocation, 0, failer)
				outcome = aftSuite.Run(0, 0, "")
			})

			It("should return false when run and report as failed", func() {
				Ω(outcome).Should(BeFalse())
				Ω(aftSuite.Passed()).Should(BeFalse())
			})

			It("should have the correct summary", func() {
				summary := aftSuite.Summary()
				Ω(summary.State).Should(Equal(types.SpecStateFailed))
				Ω(summary.Failure.Message).Should(Equal("oops"))
				Ω(summary.Failure.Location).Should(Equal(innerCodeLocation))
				Ω(summary.Failure.ForwardedPanic).Should(BeEmpty())
				Ω(summary.Failure.ComponentIndex).Should(Equal(0))
				Ω(summary.Failure.ComponentType).Should(Equal(types.SpecComponentTypeAfterSuite))
				Ω(summary.Failure.ComponentCodeLocation).Should(Equal(codeLocation))
			})
		})

		Context("when the body times out", func() {
			BeforeEach(func() {
				aftSuite = NewAfterSuiteNode(func(done Done) {
				}, codeLocation, time.Millisecond, failer)
				outcome = aftSuite.Run(0, 0, "")
			})

			It("should return false when run and report as failed", func() {
				Ω(outcome).Should(BeFalse())
				Ω(aftSuite.Passed()).Should(BeFalse())
			})

			It("should have the correct summary", func() {
				summary := aftSuite.Summary()
				Ω(summary.State).Should(Equal(types.SpecStateTimedOut))
				Ω(summary.Failure.ForwardedPanic).Should(BeEmpty())
				Ω(summary.Failure.ComponentIndex).Should(Equal(0))
				Ω(summary.Failure.ComponentType).Should(Equal(types.SpecComponentTypeAfterSuite))
				Ω(summary.Failure.ComponentCodeLocation).Should(Equal(codeLocation))
			})
		})

		Context("when the body panics", func() {
			BeforeEach(func() {
				aftSuite = NewAfterSuiteNode(func() {
					panic("bam")
				}, codeLocation, 0, failer)
				outcome = aftSuite.Run(0, 0, "")
			})

			It("should return false when run and report as failed", func() {
				Ω(outcome).Should(BeFalse())
				Ω(aftSuite.Passed()).Should(BeFalse())
			})

			It("should have the correct summary", func() {
				summary := aftSuite.Summary()
				Ω(summary.State).Should(Equal(types.SpecStatePanicked))
				Ω(summary.Failure.ForwardedPanic).Should(Equal("bam"))
				Ω(summary.Failure.ComponentIndex).Should(Equal(0))
				Ω(summary.Failure.ComponentType).Should(Equal(types.SpecComponentTypeAfterSuite))
				Ω(summary.Failure.ComponentCodeLocation).Should(Equal(codeLocation))
			})
		})
	})
})
89
vendor/github.com/onsi/ginkgo/internal/leafnodes/synchronized_after_suite_node.go
generated
vendored
Normal file
@ -0,0 +1,89 @@
package leafnodes

import (
	"encoding/json"
	"github.com/onsi/ginkgo/internal/failer"
	"github.com/onsi/ginkgo/types"
	"io/ioutil"
	"net/http"
	"time"
)

type synchronizedAfterSuiteNode struct {
	runnerA *runner
	runnerB *runner

	outcome types.SpecState
	failure types.SpecFailure
	runTime time.Duration
}

func NewSynchronizedAfterSuiteNode(bodyA interface{}, bodyB interface{}, codeLocation types.CodeLocation, timeout time.Duration, failer *failer.Failer) SuiteNode {
	return &synchronizedAfterSuiteNode{
		runnerA: newRunner(bodyA, codeLocation, timeout, failer, types.SpecComponentTypeAfterSuite, 0),
		runnerB: newRunner(bodyB, codeLocation, timeout, failer, types.SpecComponentTypeAfterSuite, 0),
	}
}

func (node *synchronizedAfterSuiteNode) Run(parallelNode int, parallelTotal int, syncHost string) bool {
	node.outcome, node.failure = node.runnerA.run()

	if parallelNode == 1 {
		if parallelTotal > 1 {
			node.waitUntilOtherNodesAreDone(syncHost)
		}

		outcome, failure := node.runnerB.run()

		if node.outcome == types.SpecStatePassed {
			node.outcome, node.failure = outcome, failure
		}
	}

	return node.outcome == types.SpecStatePassed
}

func (node *synchronizedAfterSuiteNode) Passed() bool {
	return node.outcome == types.SpecStatePassed
}

func (node *synchronizedAfterSuiteNode) Summary() *types.SetupSummary {
	return &types.SetupSummary{
		ComponentType: node.runnerA.nodeType,
		CodeLocation:  node.runnerA.codeLocation,
		State:         node.outcome,
		RunTime:       node.runTime,
		Failure:       node.failure,
	}
}

func (node *synchronizedAfterSuiteNode) waitUntilOtherNodesAreDone(syncHost string) {
	for {
		if node.canRun(syncHost) {
			return
		}

		time.Sleep(50 * time.Millisecond)
	}
}

func (node *synchronizedAfterSuiteNode) canRun(syncHost string) bool {
	resp, err := http.Get(syncHost + "/RemoteAfterSuiteData")
	if err != nil || resp.StatusCode != http.StatusOK {
		return false
	}

	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return false
	}
	resp.Body.Close()

	afterSuiteData := types.RemoteAfterSuiteData{}
	err = json.Unmarshal(body, &afterSuiteData)
	if err != nil {
		return false
	}

	return afterSuiteData.CanRun
}
196
vendor/github.com/onsi/ginkgo/internal/leafnodes/synchronized_after_suite_node_test.go
generated
vendored
Normal file
@ -0,0 +1,196 @@
package leafnodes_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/ginkgo/internal/leafnodes"
	"github.com/onsi/ginkgo/types"
	. "github.com/onsi/gomega"
	"sync"

	"github.com/onsi/gomega/ghttp"
	"net/http"

	"github.com/onsi/ginkgo/internal/codelocation"
	Failer "github.com/onsi/ginkgo/internal/failer"
	"time"
)

var _ = Describe("SynchronizedAfterSuiteNode", func() {
	var failer *Failer.Failer
	var node SuiteNode
	var codeLocation types.CodeLocation
	var innerCodeLocation types.CodeLocation
	var outcome bool
	var server *ghttp.Server
	var things []string
	var lock *sync.Mutex

	BeforeEach(func() {
		things = []string{}
		server = ghttp.NewServer()
		codeLocation = codelocation.New(0)
		innerCodeLocation = codelocation.New(0)
		failer = Failer.New()
		lock = &sync.Mutex{}
	})

	AfterEach(func() {
		server.Close()
	})

	newNode := func(bodyA interface{}, bodyB interface{}) SuiteNode {
		return NewSynchronizedAfterSuiteNode(bodyA, bodyB, codeLocation, time.Millisecond, failer)
	}

	ranThing := func(thing string) {
		lock.Lock()
		defer lock.Unlock()
		things = append(things, thing)
	}

	thingsThatRan := func() []string {
		lock.Lock()
		defer lock.Unlock()
		return things
	}

	Context("when not running in parallel", func() {
		Context("when all is well", func() {
			BeforeEach(func() {
				node = newNode(func() {
					ranThing("A")
				}, func() {
					ranThing("B")
				})

				outcome = node.Run(1, 1, server.URL())
			})

			It("should run A, then B", func() {
				Ω(thingsThatRan()).Should(Equal([]string{"A", "B"}))
			})

			It("should report success", func() {
				Ω(outcome).Should(BeTrue())
				Ω(node.Passed()).Should(BeTrue())
				Ω(node.Summary().State).Should(Equal(types.SpecStatePassed))
			})
		})

		Context("when A fails", func() {
			BeforeEach(func() {
				node = newNode(func() {
					ranThing("A")
					failer.Fail("bam", innerCodeLocation)
				}, func() {
					ranThing("B")
				})

				outcome = node.Run(1, 1, server.URL())
			})

			It("should still run B", func() {
				Ω(thingsThatRan()).Should(Equal([]string{"A", "B"}))
			})

			It("should report failure", func() {
				Ω(outcome).Should(BeFalse())
				Ω(node.Passed()).Should(BeFalse())
				Ω(node.Summary().State).Should(Equal(types.SpecStateFailed))
			})
		})

		Context("when B fails", func() {
			BeforeEach(func() {
				node = newNode(func() {
					ranThing("A")
				}, func() {
					ranThing("B")
					failer.Fail("bam", innerCodeLocation)
				})

				outcome = node.Run(1, 1, server.URL())
			})

			It("should run all the things", func() {
				Ω(thingsThatRan()).Should(Equal([]string{"A", "B"}))
			})

			It("should report failure", func() {
				Ω(outcome).Should(BeFalse())
				Ω(node.Passed()).Should(BeFalse())
				Ω(node.Summary().State).Should(Equal(types.SpecStateFailed))
			})
		})
	})

	Context("when running in parallel", func() {
		Context("as the first node", func() {
			BeforeEach(func() {
				server.AppendHandlers(ghttp.CombineHandlers(
					ghttp.VerifyRequest("GET", "/RemoteAfterSuiteData"),
					func(writer http.ResponseWriter, request *http.Request) {
						ranThing("Request1")
					},
					ghttp.RespondWithJSONEncoded(200, types.RemoteAfterSuiteData{false}),
				), ghttp.CombineHandlers(
					ghttp.VerifyRequest("GET", "/RemoteAfterSuiteData"),
					func(writer http.ResponseWriter, request *http.Request) {
						ranThing("Request2")
					},
					ghttp.RespondWithJSONEncoded(200, types.RemoteAfterSuiteData{false}),
				), ghttp.CombineHandlers(
					ghttp.VerifyRequest("GET", "/RemoteAfterSuiteData"),
					func(writer http.ResponseWriter, request *http.Request) {
						ranThing("Request3")
					},
					ghttp.RespondWithJSONEncoded(200, types.RemoteAfterSuiteData{true}),
				))

				node = newNode(func() {
					ranThing("A")
				}, func() {
					ranThing("B")
				})

				outcome = node.Run(1, 3, server.URL())
			})

			It("should run A and, when the server says its time, run B", func() {
				Ω(thingsThatRan()).Should(Equal([]string{"A", "Request1", "Request2", "Request3", "B"}))
			})

			It("should report success", func() {
				Ω(outcome).Should(BeTrue())
				Ω(node.Passed()).Should(BeTrue())
				Ω(node.Summary().State).Should(Equal(types.SpecStatePassed))
			})
		})

		Context("as any other node", func() {
			BeforeEach(func() {
				node = newNode(func() {
					ranThing("A")
				}, func() {
					ranThing("B")
				})

				outcome = node.Run(2, 3, server.URL())
			})

			It("should run A, and not run B", func() {
				Ω(thingsThatRan()).Should(Equal([]string{"A"}))
			})

			It("should not talk to the server", func() {
				Ω(server.ReceivedRequests()).Should(BeEmpty())
			})

			It("should report success", func() {
				Ω(outcome).Should(BeTrue())
				Ω(node.Passed()).Should(BeTrue())
				Ω(node.Summary().State).Should(Equal(types.SpecStatePassed))
			})
		})
	})
})
182
vendor/github.com/onsi/ginkgo/internal/leafnodes/synchronized_before_suite_node.go
generated
vendored
Normal file
@ -0,0 +1,182 @@
package leafnodes

import (
	"bytes"
	"encoding/json"
	"github.com/onsi/ginkgo/internal/failer"
	"github.com/onsi/ginkgo/types"
	"io/ioutil"
	"net/http"
	"reflect"
	"time"
)

type synchronizedBeforeSuiteNode struct {
	runnerA *runner
	runnerB *runner

	data []byte

	outcome types.SpecState
	failure types.SpecFailure
	runTime time.Duration
}

func NewSynchronizedBeforeSuiteNode(bodyA interface{}, bodyB interface{}, codeLocation types.CodeLocation, timeout time.Duration, failer *failer.Failer) SuiteNode {
	node := &synchronizedBeforeSuiteNode{}

	node.runnerA = newRunner(node.wrapA(bodyA), codeLocation, timeout, failer, types.SpecComponentTypeBeforeSuite, 0)
	node.runnerB = newRunner(node.wrapB(bodyB), codeLocation, timeout, failer, types.SpecComponentTypeBeforeSuite, 0)

	return node
}

func (node *synchronizedBeforeSuiteNode) Run(parallelNode int, parallelTotal int, syncHost string) bool {
	t := time.Now()
	defer func() {
		node.runTime = time.Since(t)
	}()

	if parallelNode == 1 {
		node.outcome, node.failure = node.runA(parallelTotal, syncHost)
	} else {
		node.outcome, node.failure = node.waitForA(syncHost)
	}

	if node.outcome != types.SpecStatePassed {
		return false
	}
	node.outcome, node.failure = node.runnerB.run()

	return node.outcome == types.SpecStatePassed
}

func (node *synchronizedBeforeSuiteNode) runA(parallelTotal int, syncHost string) (types.SpecState, types.SpecFailure) {
	outcome, failure := node.runnerA.run()

	if parallelTotal > 1 {
		state := types.RemoteBeforeSuiteStatePassed
		if outcome != types.SpecStatePassed {
			state = types.RemoteBeforeSuiteStateFailed
		}
		json := (types.RemoteBeforeSuiteData{
			Data:  node.data,
			State: state,
		}).ToJSON()
		http.Post(syncHost+"/BeforeSuiteState", "application/json", bytes.NewBuffer(json))
	}

	return outcome, failure
}

func (node *synchronizedBeforeSuiteNode) waitForA(syncHost string) (types.SpecState, types.SpecFailure) {
	failure := func(message string) types.SpecFailure {
		return types.SpecFailure{
			Message:               message,
			Location:              node.runnerA.codeLocation,
			ComponentType:         node.runnerA.nodeType,
			ComponentIndex:        node.runnerA.componentIndex,
			ComponentCodeLocation: node.runnerA.codeLocation,
		}
	}
	for {
		resp, err := http.Get(syncHost + "/BeforeSuiteState")
		if err != nil || resp.StatusCode != http.StatusOK {
			return types.SpecStateFailed, failure("Failed to fetch BeforeSuite state")
		}

		body, err := ioutil.ReadAll(resp.Body)
		if err != nil {
			return types.SpecStateFailed, failure("Failed to read BeforeSuite state")
		}
		resp.Body.Close()

		beforeSuiteData := types.RemoteBeforeSuiteData{}
		err = json.Unmarshal(body, &beforeSuiteData)
		if err != nil {
			return types.SpecStateFailed, failure("Failed to decode BeforeSuite state")
		}

		switch beforeSuiteData.State {
		case types.RemoteBeforeSuiteStatePassed:
			node.data = beforeSuiteData.Data
			return types.SpecStatePassed, types.SpecFailure{}
		case types.RemoteBeforeSuiteStateFailed:
			return types.SpecStateFailed, failure("BeforeSuite on Node 1 failed")
		case types.RemoteBeforeSuiteStateDisappeared:
			return types.SpecStateFailed, failure("Node 1 disappeared before completing BeforeSuite")
		}

		time.Sleep(50 * time.Millisecond)
	}

	return types.SpecStateFailed, failure("Shouldn't get here!")
}

func (node *synchronizedBeforeSuiteNode) Passed() bool {
	return node.outcome == types.SpecStatePassed
}

func (node *synchronizedBeforeSuiteNode) Summary() *types.SetupSummary {
	return &types.SetupSummary{
		ComponentType: node.runnerA.nodeType,
		CodeLocation:  node.runnerA.codeLocation,
		State:         node.outcome,
		RunTime:       node.runTime,
		Failure:       node.failure,
	}
}

func (node *synchronizedBeforeSuiteNode) wrapA(bodyA interface{}) interface{} {
	typeA := reflect.TypeOf(bodyA)
	if typeA.Kind() != reflect.Func {
		panic("SynchronizedBeforeSuite expects a function as its first argument")
	}

	takesNothing := typeA.NumIn() == 0
	takesADoneChannel := typeA.NumIn() == 1 && typeA.In(0).Kind() == reflect.Chan && typeA.In(0).Elem().Kind() == reflect.Interface
	returnsBytes := typeA.NumOut() == 1 && typeA.Out(0).Kind() == reflect.Slice && typeA.Out(0).Elem().Kind() == reflect.Uint8

	if !((takesNothing || takesADoneChannel) && returnsBytes) {
		panic("SynchronizedBeforeSuite's first argument should be a function that returns []byte and either takes no arguments or takes a Done channel.")
	}

	if takesADoneChannel {
		return func(done chan<- interface{}) {
			out := reflect.ValueOf(bodyA).Call([]reflect.Value{reflect.ValueOf(done)})
			node.data = out[0].Interface().([]byte)
		}
	}

	return func() {
		out := reflect.ValueOf(bodyA).Call([]reflect.Value{})
		node.data = out[0].Interface().([]byte)
	}
}

func (node *synchronizedBeforeSuiteNode) wrapB(bodyB interface{}) interface{} {
	typeB := reflect.TypeOf(bodyB)
	if typeB.Kind() != reflect.Func {
		panic("SynchronizedBeforeSuite expects a function as its second argument")
	}

	returnsNothing := typeB.NumOut() == 0
	takesBytesOnly := typeB.NumIn() == 1 && typeB.In(0).Kind() == reflect.Slice && typeB.In(0).Elem().Kind() == reflect.Uint8
	takesBytesAndDone := typeB.NumIn() == 2 &&
		typeB.In(0).Kind() == reflect.Slice && typeB.In(0).Elem().Kind() == reflect.Uint8 &&
		typeB.In(1).Kind() == reflect.Chan && typeB.In(1).Elem().Kind() == reflect.Interface

	if !((takesBytesOnly || takesBytesAndDone) && returnsNothing) {
		panic("SynchronizedBeforeSuite's second argument should be a function that returns nothing and either takes []byte or ([]byte, Done)")
	}

	if takesBytesAndDone {
		return func(done chan<- interface{}) {
			reflect.ValueOf(bodyB).Call([]reflect.Value{reflect.ValueOf(node.data), reflect.ValueOf(done)})
		}
	}

	return func() {
		reflect.ValueOf(bodyB).Call([]reflect.Value{reflect.ValueOf(node.data)})
	}
}
445
vendor/github.com/onsi/ginkgo/internal/leafnodes/synchronized_before_suite_node_test.go
generated
vendored
Normal file
@ -0,0 +1,445 @@
package leafnodes_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
. "github.com/onsi/ginkgo"
|
||||||
|
. "github.com/onsi/ginkgo/internal/leafnodes"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
|
||||||
|
"github.com/onsi/gomega/ghttp"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"github.com/onsi/ginkgo/internal/codelocation"
|
||||||
|
Failer "github.com/onsi/ginkgo/internal/failer"
|
||||||
|
"github.com/onsi/ginkgo/types"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ = Describe("SynchronizedBeforeSuiteNode", func() {
|
||||||
|
var failer *Failer.Failer
|
||||||
|
var node SuiteNode
|
||||||
|
var codeLocation types.CodeLocation
|
||||||
|
var innerCodeLocation types.CodeLocation
|
||||||
|
var outcome bool
|
||||||
|
var server *ghttp.Server
|
||||||
|
|
||||||
|
BeforeEach(func() {
|
||||||
|
server = ghttp.NewServer()
|
||||||
|
codeLocation = codelocation.New(0)
|
||||||
|
innerCodeLocation = codelocation.New(0)
|
||||||
|
failer = Failer.New()
|
||||||
|
})
|
||||||
|
|
||||||
|
AfterEach(func() {
|
||||||
|
server.Close()
|
||||||
|
})
|
||||||
|
|
||||||
|
newNode := func(bodyA interface{}, bodyB interface{}) SuiteNode {
|
||||||
|
return NewSynchronizedBeforeSuiteNode(bodyA, bodyB, codeLocation, time.Millisecond, failer)
|
||||||
|
}
|
||||||
|
|
||||||
|
Describe("when not running in parallel", func() {
|
||||||
|
Context("when all is well", func() {
|
||||||
|
var data []byte
|
||||||
|
BeforeEach(func() {
|
||||||
|
data = nil
|
||||||
|
|
||||||
|
node = newNode(func() []byte {
|
||||||
|
return []byte("my data")
|
||||||
|
}, func(d []byte) {
|
||||||
|
data = d
|
||||||
|
})
|
||||||
|
|
||||||
|
outcome = node.Run(1, 1, server.URL())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should run A, then B passing the output from A to B", func() {
|
||||||
|
Ω(data).Should(Equal([]byte("my data")))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should report success", func() {
|
||||||
|
Ω(outcome).Should(BeTrue())
|
||||||
|
Ω(node.Passed()).Should(BeTrue())
|
||||||
|
Ω(node.Summary().State).Should(Equal(types.SpecStatePassed))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when A fails", func() {
|
||||||
|
var ranB bool
|
||||||
|
BeforeEach(func() {
|
||||||
|
ranB = false
|
||||||
|
node = newNode(func() []byte {
|
||||||
|
failer.Fail("boom", innerCodeLocation)
|
||||||
|
return nil
|
||||||
|
}, func([]byte) {
|
||||||
|
ranB = true
|
||||||
|
})
|
||||||
|
|
||||||
|
outcome = node.Run(1, 1, server.URL())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should not run B", func() {
|
||||||
|
Ω(ranB).Should(BeFalse())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should report failure", func() {
|
||||||
|
Ω(outcome).Should(BeFalse())
|
||||||
|
Ω(node.Passed()).Should(BeFalse())
|
||||||
|
Ω(node.Summary().State).Should(Equal(types.SpecStateFailed))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when B fails", func() {
|
||||||
|
BeforeEach(func() {
|
||||||
|
node = newNode(func() []byte {
|
||||||
|
return nil
|
||||||
|
}, func([]byte) {
|
||||||
|
failer.Fail("boom", innerCodeLocation)
|
||||||
|
})
|
||||||
|
|
||||||
|
outcome = node.Run(1, 1, server.URL())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should report failure", func() {
|
||||||
|
Ω(outcome).Should(BeFalse())
|
||||||
|
Ω(node.Passed()).Should(BeFalse())
|
||||||
|
Ω(node.Summary().State).Should(Equal(types.SpecStateFailed))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when A times out", func() {
|
||||||
|
var ranB bool
|
||||||
|
BeforeEach(func() {
|
||||||
|
ranB = false
|
||||||
|
node = newNode(func(Done) []byte {
|
||||||
|
time.Sleep(time.Second)
|
||||||
|
return nil
|
||||||
|
}, func([]byte) {
|
||||||
|
ranB = true
|
||||||
|
})
|
||||||
|
|
||||||
|
outcome = node.Run(1, 1, server.URL())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should not run B", func() {
|
||||||
|
Ω(ranB).Should(BeFalse())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should report failure", func() {
|
||||||
|
Ω(outcome).Should(BeFalse())
|
||||||
|
Ω(node.Passed()).Should(BeFalse())
|
||||||
|
Ω(node.Summary().State).Should(Equal(types.SpecStateTimedOut))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when B times out", func() {
|
||||||
|
BeforeEach(func() {
|
||||||
|
node = newNode(func() []byte {
|
||||||
|
return nil
|
||||||
|
}, func([]byte, Done) {
|
||||||
|
time.Sleep(time.Second)
|
||||||
|
})
|
||||||
|
|
||||||
|
outcome = node.Run(1, 1, server.URL())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should report failure", func() {
|
||||||
|
Ω(outcome).Should(BeFalse())
|
||||||
|
Ω(node.Passed()).Should(BeFalse())
|
||||||
|
Ω(node.Summary().State).Should(Equal(types.SpecStateTimedOut))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("when running in parallel", func() {
|
||||||
|
var ranB bool
|
||||||
|
var parallelNode, parallelTotal int
|
||||||
|
BeforeEach(func() {
|
||||||
|
ranB = false
|
||||||
|
parallelNode, parallelTotal = 1, 3
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("as the first node, it runs A", func() {
|
||||||
|
var expectedState types.RemoteBeforeSuiteData
|
||||||
|
|
||||||
|
BeforeEach(func() {
|
||||||
|
parallelNode, parallelTotal = 1, 3
|
||||||
|
})
|
||||||
|
|
||||||
|
JustBeforeEach(func() {
|
||||||
|
server.AppendHandlers(ghttp.CombineHandlers(
|
||||||
|
ghttp.VerifyRequest("POST", "/BeforeSuiteState"),
|
||||||
|
ghttp.VerifyJSONRepresenting(expectedState),
|
||||||
|
))
|
||||||
|
|
||||||
|
outcome = node.Run(parallelNode, parallelTotal, server.URL())
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when A succeeds", func() {
|
||||||
|
BeforeEach(func() {
|
||||||
|
expectedState = types.RemoteBeforeSuiteData{[]byte("my data"), types.RemoteBeforeSuiteStatePassed}
|
||||||
|
|
||||||
|
node = newNode(func() []byte {
|
||||||
|
return []byte("my data")
|
||||||
|
}, func([]byte) {
|
||||||
|
ranB = true
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should post about A succeeding", func() {
|
||||||
|
Ω(server.ReceivedRequests()).Should(HaveLen(1))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should run B", func() {
|
||||||
|
Ω(ranB).Should(BeTrue())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should report success", func() {
|
||||||
|
Ω(outcome).Should(BeTrue())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when A fails", func() {
|
||||||
|
BeforeEach(func() {
|
||||||
|
expectedState = types.RemoteBeforeSuiteData{nil, types.RemoteBeforeSuiteStateFailed}
|
||||||
|
|
||||||
|
node = newNode(func() []byte {
|
||||||
|
panic("BAM")
|
||||||
|
return []byte("my data")
|
||||||
|
}, func([]byte) {
|
||||||
|
ranB = true
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should post about A failing", func() {
|
||||||
|
Ω(server.ReceivedRequests()).Should(HaveLen(1))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should not run B", func() {
|
||||||
|
Ω(ranB).Should(BeFalse())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should report failure", func() {
|
||||||
|
Ω(outcome).Should(BeFalse())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("as the Nth node", func() {
|
||||||
|
var statusCode int
|
||||||
|
var response interface{}
|
||||||
|
var ranA bool
|
||||||
|
var bData []byte
|
||||||
|
|
||||||
|
BeforeEach(func() {
|
||||||
|
ranA = false
|
||||||
|
bData = nil
|
||||||
|
|
||||||
|
statusCode = http.StatusOK
|
||||||
|
|
||||||
|
server.AppendHandlers(ghttp.CombineHandlers(
|
||||||
|
ghttp.VerifyRequest("GET", "/BeforeSuiteState"),
|
||||||
|
ghttp.RespondWith(http.StatusOK, string((types.RemoteBeforeSuiteData{nil, types.RemoteBeforeSuiteStatePending}).ToJSON())),
|
||||||
|
), ghttp.CombineHandlers(
|
||||||
|
ghttp.VerifyRequest("GET", "/BeforeSuiteState"),
|
||||||
|
ghttp.RespondWith(http.StatusOK, string((types.RemoteBeforeSuiteData{nil, types.RemoteBeforeSuiteStatePending}).ToJSON())),
|
||||||
|
), ghttp.CombineHandlers(
|
||||||
|
ghttp.VerifyRequest("GET", "/BeforeSuiteState"),
|
||||||
|
ghttp.RespondWithJSONEncodedPtr(&statusCode, &response),
|
||||||
|
))
|
||||||
|
|
||||||
|
node = newNode(func() []byte {
|
||||||
|
ranA = true
|
||||||
|
return nil
|
||||||
|
}, func(data []byte) {
|
||||||
|
bData = data
|
||||||
|
})
|
||||||
|
|
||||||
|
parallelNode, parallelTotal = 2, 3
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when A on node1 succeeds", func() {
|
||||||
|
BeforeEach(func() {
|
||||||
|
response = types.RemoteBeforeSuiteData{[]byte("my data"), types.RemoteBeforeSuiteStatePassed}
|
||||||
|
outcome = node.Run(parallelNode, parallelTotal, server.URL())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should not run A", func() {
|
||||||
|
Ω(ranA).Should(BeFalse())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should poll for A", func() {
|
||||||
|
Ω(server.ReceivedRequests()).Should(HaveLen(3))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should run B when the polling succeeds", func() {
|
||||||
|
Ω(bData).Should(Equal([]byte("my data")))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should succeed", func() {
|
||||||
|
Ω(outcome).Should(BeTrue())
|
||||||
|
Ω(node.Passed()).Should(BeTrue())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when A on node1 fails", func() {
|
||||||
|
BeforeEach(func() {
|
||||||
|
response = types.RemoteBeforeSuiteData{[]byte("my data"), types.RemoteBeforeSuiteStateFailed}
|
||||||
|
outcome = node.Run(parallelNode, parallelTotal, server.URL())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should not run A", func() {
|
||||||
|
Ω(ranA).Should(BeFalse())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should poll for A", func() {
|
||||||
|
Ω(server.ReceivedRequests()).Should(HaveLen(3))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should not run B", func() {
|
||||||
|
Ω(bData).Should(BeNil())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should fail", func() {
|
||||||
|
Ω(outcome).Should(BeFalse())
|
||||||
|
Ω(node.Passed()).Should(BeFalse())
|
||||||
|
|
||||||
|
summary := node.Summary()
|
||||||
|
Ω(summary.State).Should(Equal(types.SpecStateFailed))
|
||||||
|
Ω(summary.Failure.Message).Should(Equal("BeforeSuite on Node 1 failed"))
|
||||||
|
Ω(summary.Failure.Location).Should(Equal(codeLocation))
|
||||||
|
Ω(summary.Failure.ComponentType).Should(Equal(types.SpecComponentTypeBeforeSuite))
|
||||||
|
Ω(summary.Failure.ComponentIndex).Should(Equal(0))
|
||||||
|
Ω(summary.Failure.ComponentCodeLocation).Should(Equal(codeLocation))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when node1 disappears", func() {
|
||||||
|
BeforeEach(func() {
|
||||||
|
response = types.RemoteBeforeSuiteData{[]byte("my data"), types.RemoteBeforeSuiteStateDisappeared}
|
||||||
|
outcome = node.Run(parallelNode, parallelTotal, server.URL())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should not run A", func() {
|
||||||
|
Ω(ranA).Should(BeFalse())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should poll for A", func() {
|
||||||
|
Ω(server.ReceivedRequests()).Should(HaveLen(3))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should not run B", func() {
|
||||||
|
Ω(bData).Should(BeNil())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should fail", func() {
|
||||||
|
Ω(outcome).Should(BeFalse())
|
||||||
|
Ω(node.Passed()).Should(BeFalse())
|
||||||
|
|
||||||
|
summary := node.Summary()
|
||||||
|
Ω(summary.State).Should(Equal(types.SpecStateFailed))
|
||||||
|
Ω(summary.Failure.Message).Should(Equal("Node 1 disappeared before completing BeforeSuite"))
|
||||||
|
Ω(summary.Failure.Location).Should(Equal(codeLocation))
|
||||||
|
Ω(summary.Failure.ComponentType).Should(Equal(types.SpecComponentTypeBeforeSuite))
|
||||||
|
Ω(summary.Failure.ComponentIndex).Should(Equal(0))
|
||||||
|
Ω(summary.Failure.ComponentCodeLocation).Should(Equal(codeLocation))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("construction", func() {
|
||||||
|
Describe("the first function", func() {
|
||||||
|
Context("when the first function returns a byte array", func() {
|
||||||
|
Context("and takes nothing", func() {
|
||||||
|
It("should be fine", func() {
|
||||||
|
Ω(func() {
|
||||||
|
newNode(func() []byte { return nil }, func([]byte) {})
|
||||||
|
}).ShouldNot(Panic())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("and takes a done function", func() {
|
||||||
|
It("should be fine", func() {
|
||||||
|
Ω(func() {
|
||||||
|
newNode(func(Done) []byte { return nil }, func([]byte) {})
|
||||||
|
}).ShouldNot(Panic())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("and takes more than one thing", func() {
|
||||||
|
It("should panic", func() {
|
||||||
|
Ω(func() {
|
||||||
|
newNode(func(Done, Done) []byte { return nil }, func([]byte) {})
|
||||||
|
}).Should(Panic())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("and takes something else", func() {
|
||||||
|
It("should panic", func() {
|
||||||
|
Ω(func() {
|
||||||
|
newNode(func(bool) []byte { return nil }, func([]byte) {})
|
||||||
|
}).Should(Panic())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when the first function does not return a byte array", func() {
|
||||||
|
It("should panic", func() {
|
||||||
|
Ω(func() {
|
||||||
|
newNode(func() {}, func([]byte) {})
|
||||||
|
}).Should(Panic())
|
||||||
|
|
||||||
|
Ω(func() {
|
||||||
|
newNode(func() []int { return nil }, func([]byte) {})
|
||||||
|
}).Should(Panic())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Describe("the second function", func() {
|
||||||
|
Context("when the second function takes a byte array", func() {
|
||||||
|
It("should be fine", func() {
|
||||||
|
Ω(func() {
|
||||||
|
newNode(func() []byte { return nil }, func([]byte) {})
|
||||||
|
}).ShouldNot(Panic())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("when it also takes a done channel", func() {
|
||||||
|
It("should be fine", func() {
|
||||||
|
Ω(func() {
|
||||||
|
newNode(func() []byte { return nil }, func([]byte, Done) {})
|
||||||
|
}).ShouldNot(Panic())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("if it takes anything else", func() {
|
||||||
|
It("should panic", func() {
|
||||||
|
Ω(func() {
|
||||||
|
newNode(func() []byte { return nil }, func([]byte, chan bool) {})
|
||||||
|
}).Should(Panic())
|
||||||
|
|
||||||
|
Ω(func() {
|
||||||
|
newNode(func() []byte { return nil }, func(string) {})
|
||||||
|
}).Should(Panic())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("if it takes nothing at all", func() {
|
||||||
|
It("should panic", func() {
|
||||||
|
Ω(func() {
|
||||||
|
newNode(func() []byte { return nil }, func() {})
|
||||||
|
}).Should(Panic())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
Context("if it returns something", func() {
|
||||||
|
It("should panic", func() {
|
||||||
|
Ω(func() {
|
||||||
|
newNode(func() []byte { return nil }, func([]byte) []byte { return nil })
|
||||||
|
}).Should(Panic())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
251
vendor/github.com/onsi/ginkgo/internal/remote/aggregator.go
generated
vendored
Normal file
251
vendor/github.com/onsi/ginkgo/internal/remote/aggregator.go
generated
vendored
Normal file
@ -0,0 +1,251 @@
/*

Aggregator is a reporter used by the Ginkgo CLI to aggregate and present parallel test output
coherently as tests complete. You shouldn't need to use this in your code. To run tests in parallel:

	ginkgo -nodes=N

where N is the number of nodes you desire.
*/
package remote

import (
	"time"

	"github.com/onsi/ginkgo/config"
	"github.com/onsi/ginkgo/reporters/stenographer"
	"github.com/onsi/ginkgo/types"
)

type configAndSuite struct {
	config  config.GinkgoConfigType
	summary *types.SuiteSummary
}

type Aggregator struct {
	nodeCount    int
	config       config.DefaultReporterConfigType
	stenographer stenographer.Stenographer
	result       chan bool

	suiteBeginnings           chan configAndSuite
	aggregatedSuiteBeginnings []configAndSuite

	beforeSuites           chan *types.SetupSummary
	aggregatedBeforeSuites []*types.SetupSummary

	afterSuites           chan *types.SetupSummary
	aggregatedAfterSuites []*types.SetupSummary

	specCompletions chan *types.SpecSummary
	completedSpecs  []*types.SpecSummary

	suiteEndings           chan *types.SuiteSummary
	aggregatedSuiteEndings []*types.SuiteSummary
	specs                  []*types.SpecSummary

	startTime time.Time
}

func NewAggregator(nodeCount int, result chan bool, config config.DefaultReporterConfigType, stenographer stenographer.Stenographer) *Aggregator {
	aggregator := &Aggregator{
		nodeCount:    nodeCount,
		result:       result,
		config:       config,
		stenographer: stenographer,

		suiteBeginnings: make(chan configAndSuite, 0),
		beforeSuites:    make(chan *types.SetupSummary, 0),
		afterSuites:     make(chan *types.SetupSummary, 0),
		specCompletions: make(chan *types.SpecSummary, 0),
		suiteEndings:    make(chan *types.SuiteSummary, 0),
	}

	go aggregator.mux()

	return aggregator
}

func (aggregator *Aggregator) SpecSuiteWillBegin(config config.GinkgoConfigType, summary *types.SuiteSummary) {
	aggregator.suiteBeginnings <- configAndSuite{config, summary}
}

func (aggregator *Aggregator) BeforeSuiteDidRun(setupSummary *types.SetupSummary) {
	aggregator.beforeSuites <- setupSummary
}

func (aggregator *Aggregator) AfterSuiteDidRun(setupSummary *types.SetupSummary) {
	aggregator.afterSuites <- setupSummary
}

func (aggregator *Aggregator) SpecWillRun(specSummary *types.SpecSummary) {
	//noop
}

func (aggregator *Aggregator) SpecDidComplete(specSummary *types.SpecSummary) {
	aggregator.specCompletions <- specSummary
}

func (aggregator *Aggregator) SpecSuiteDidEnd(summary *types.SuiteSummary) {
	aggregator.suiteEndings <- summary
}

func (aggregator *Aggregator) mux() {
loop:
	for {
		select {
		case configAndSuite := <-aggregator.suiteBeginnings:
			aggregator.registerSuiteBeginning(configAndSuite)
		case setupSummary := <-aggregator.beforeSuites:
			aggregator.registerBeforeSuite(setupSummary)
		case setupSummary := <-aggregator.afterSuites:
			aggregator.registerAfterSuite(setupSummary)
		case specSummary := <-aggregator.specCompletions:
			aggregator.registerSpecCompletion(specSummary)
		case suite := <-aggregator.suiteEndings:
			finished, passed := aggregator.registerSuiteEnding(suite)
			if finished {
				aggregator.result <- passed
				break loop
			}
		}
	}
}

func (aggregator *Aggregator) registerSuiteBeginning(configAndSuite configAndSuite) {
	aggregator.aggregatedSuiteBeginnings = append(aggregator.aggregatedSuiteBeginnings, configAndSuite)

	if len(aggregator.aggregatedSuiteBeginnings) == 1 {
		aggregator.startTime = time.Now()
	}

	if len(aggregator.aggregatedSuiteBeginnings) != aggregator.nodeCount {
		return
	}

	aggregator.stenographer.AnnounceSuite(configAndSuite.summary.SuiteDescription, configAndSuite.config.RandomSeed, configAndSuite.config.RandomizeAllSpecs, aggregator.config.Succinct)

	numberOfSpecsToRun := 0
	totalNumberOfSpecs := 0
	for _, configAndSuite := range aggregator.aggregatedSuiteBeginnings {
		numberOfSpecsToRun += configAndSuite.summary.NumberOfSpecsThatWillBeRun
		totalNumberOfSpecs += configAndSuite.summary.NumberOfTotalSpecs
	}

	aggregator.stenographer.AnnounceNumberOfSpecs(numberOfSpecsToRun, totalNumberOfSpecs, aggregator.config.Succinct)
	aggregator.stenographer.AnnounceAggregatedParallelRun(aggregator.nodeCount, aggregator.config.Succinct)
	aggregator.flushCompletedSpecs()
}

func (aggregator *Aggregator) registerBeforeSuite(setupSummary *types.SetupSummary) {
	aggregator.aggregatedBeforeSuites = append(aggregator.aggregatedBeforeSuites, setupSummary)
	aggregator.flushCompletedSpecs()
}

func (aggregator *Aggregator) registerAfterSuite(setupSummary *types.SetupSummary) {
	aggregator.aggregatedAfterSuites = append(aggregator.aggregatedAfterSuites, setupSummary)
	aggregator.flushCompletedSpecs()
}

func (aggregator *Aggregator) registerSpecCompletion(specSummary *types.SpecSummary) {
	aggregator.completedSpecs = append(aggregator.completedSpecs, specSummary)
	aggregator.specs = append(aggregator.specs, specSummary)
	aggregator.flushCompletedSpecs()
}

func (aggregator *Aggregator) flushCompletedSpecs() {
	if len(aggregator.aggregatedSuiteBeginnings) != aggregator.nodeCount {
		return
	}

	for _, setupSummary := range aggregator.aggregatedBeforeSuites {
		aggregator.announceBeforeSuite(setupSummary)
	}

	for _, specSummary := range aggregator.completedSpecs {
		aggregator.announceSpec(specSummary)
	}

	for _, setupSummary := range aggregator.aggregatedAfterSuites {
		aggregator.announceAfterSuite(setupSummary)
	}

	aggregator.aggregatedBeforeSuites = []*types.SetupSummary{}
	aggregator.completedSpecs = []*types.SpecSummary{}
	aggregator.aggregatedAfterSuites = []*types.SetupSummary{}
}

func (aggregator *Aggregator) announceBeforeSuite(setupSummary *types.SetupSummary) {
	aggregator.stenographer.AnnounceCapturedOutput(setupSummary.CapturedOutput)
	if setupSummary.State != types.SpecStatePassed {
		aggregator.stenographer.AnnounceBeforeSuiteFailure(setupSummary, aggregator.config.Succinct, aggregator.config.FullTrace)
	}
}

func (aggregator *Aggregator) announceAfterSuite(setupSummary *types.SetupSummary) {
	aggregator.stenographer.AnnounceCapturedOutput(setupSummary.CapturedOutput)
	if setupSummary.State != types.SpecStatePassed {
		aggregator.stenographer.AnnounceAfterSuiteFailure(setupSummary, aggregator.config.Succinct, aggregator.config.FullTrace)
	}
}

func (aggregator *Aggregator) announceSpec(specSummary *types.SpecSummary) {
	if aggregator.config.Verbose && specSummary.State != types.SpecStatePending && specSummary.State != types.SpecStateSkipped {
		aggregator.stenographer.AnnounceSpecWillRun(specSummary)
	}

	aggregator.stenographer.AnnounceCapturedOutput(specSummary.CapturedOutput)

	switch specSummary.State {
	case types.SpecStatePassed:
		if specSummary.IsMeasurement {
			aggregator.stenographer.AnnounceSuccesfulMeasurement(specSummary, aggregator.config.Succinct)
		} else if specSummary.RunTime.Seconds() >= aggregator.config.SlowSpecThreshold {
			aggregator.stenographer.AnnounceSuccesfulSlowSpec(specSummary, aggregator.config.Succinct)
		} else {
			aggregator.stenographer.AnnounceSuccesfulSpec(specSummary)
		}

	case types.SpecStatePending:
		aggregator.stenographer.AnnouncePendingSpec(specSummary, aggregator.config.NoisyPendings && !aggregator.config.Succinct)
	case types.SpecStateSkipped:
		aggregator.stenographer.AnnounceSkippedSpec(specSummary, aggregator.config.Succinct, aggregator.config.FullTrace)
	case types.SpecStateTimedOut:
		aggregator.stenographer.AnnounceSpecTimedOut(specSummary, aggregator.config.Succinct, aggregator.config.FullTrace)
	case types.SpecStatePanicked:
		aggregator.stenographer.AnnounceSpecPanicked(specSummary, aggregator.config.Succinct, aggregator.config.FullTrace)
	case types.SpecStateFailed:
		aggregator.stenographer.AnnounceSpecFailed(specSummary, aggregator.config.Succinct, aggregator.config.FullTrace)
	}
}

func (aggregator *Aggregator) registerSuiteEnding(suite *types.SuiteSummary) (finished bool, passed bool) {
	aggregator.aggregatedSuiteEndings = append(aggregator.aggregatedSuiteEndings, suite)
	if len(aggregator.aggregatedSuiteEndings) < aggregator.nodeCount {
		return false, false
	}

	aggregatedSuiteSummary := &types.SuiteSummary{}
	aggregatedSuiteSummary.SuiteSucceeded = true

	for _, suiteSummary := range aggregator.aggregatedSuiteEndings {
		if suiteSummary.SuiteSucceeded == false {
			aggregatedSuiteSummary.SuiteSucceeded = false
		}

		aggregatedSuiteSummary.NumberOfSpecsThatWillBeRun += suiteSummary.NumberOfSpecsThatWillBeRun
		aggregatedSuiteSummary.NumberOfTotalSpecs += suiteSummary.NumberOfTotalSpecs
		aggregatedSuiteSummary.NumberOfPassedSpecs += suiteSummary.NumberOfPassedSpecs
		aggregatedSuiteSummary.NumberOfFailedSpecs += suiteSummary.NumberOfFailedSpecs
		aggregatedSuiteSummary.NumberOfPendingSpecs += suiteSummary.NumberOfPendingSpecs
		aggregatedSuiteSummary.NumberOfSkippedSpecs += suiteSummary.NumberOfSkippedSpecs
		aggregatedSuiteSummary.NumberOfFlakedSpecs += suiteSummary.NumberOfFlakedSpecs
	}

	aggregatedSuiteSummary.RunTime = time.Since(aggregator.startTime)

	aggregator.stenographer.SummarizeFailures(aggregator.specs)
	aggregator.stenographer.AnnounceSpecRunCompletion(aggregatedSuiteSummary, aggregator.config.Succinct)

	return true, aggregatedSuiteSummary.SuiteSucceeded
}
314
vendor/github.com/onsi/ginkgo/internal/remote/aggregator_test.go
generated
vendored
Normal file
@ -0,0 +1,314 @@
package remote_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"

	"github.com/onsi/ginkgo/config"
	. "github.com/onsi/ginkgo/internal/remote"
	st "github.com/onsi/ginkgo/reporters/stenographer"
	"github.com/onsi/ginkgo/types"
	"time"
)

var _ = Describe("Aggregator", func() {
	var (
		aggregator     *Aggregator
		reporterConfig config.DefaultReporterConfigType
		stenographer   *st.FakeStenographer
		result         chan bool

		ginkgoConfig1 config.GinkgoConfigType
		ginkgoConfig2 config.GinkgoConfigType

		suiteSummary1 *types.SuiteSummary
		suiteSummary2 *types.SuiteSummary

		beforeSummary *types.SetupSummary
		afterSummary  *types.SetupSummary
		specSummary   *types.SpecSummary

		suiteDescription string
	)

	BeforeEach(func() {
		reporterConfig = config.DefaultReporterConfigType{
			NoColor:           false,
			SlowSpecThreshold: 0.1,
			NoisyPendings:     true,
			Succinct:          false,
			Verbose:           true,
		}
		stenographer = st.NewFakeStenographer()
		result = make(chan bool, 1)
		aggregator = NewAggregator(2, result, reporterConfig, stenographer)

		//
		// now set up some fixture data
		//

		ginkgoConfig1 = config.GinkgoConfigType{
			RandomSeed:        1138,
			RandomizeAllSpecs: true,
			ParallelNode:      1,
			ParallelTotal:     2,
		}

		ginkgoConfig2 = config.GinkgoConfigType{
			RandomSeed:        1138,
			RandomizeAllSpecs: true,
			ParallelNode:      2,
			ParallelTotal:     2,
		}

		suiteDescription = "My Parallel Suite"

		suiteSummary1 = &types.SuiteSummary{
			SuiteDescription: suiteDescription,

			NumberOfSpecsBeforeParallelization: 30,
			NumberOfTotalSpecs:                 17,
			NumberOfSpecsThatWillBeRun:         15,
			NumberOfPendingSpecs:               1,
			NumberOfSkippedSpecs:               1,
		}

		suiteSummary2 = &types.SuiteSummary{
			SuiteDescription: suiteDescription,

			NumberOfSpecsBeforeParallelization: 30,
			NumberOfTotalSpecs:                 13,
			NumberOfSpecsThatWillBeRun:         8,
			NumberOfPendingSpecs:               2,
			NumberOfSkippedSpecs:               3,
		}

		beforeSummary = &types.SetupSummary{
			State:          types.SpecStatePassed,
			CapturedOutput: "BeforeSuiteOutput",
		}

		afterSummary = &types.SetupSummary{
			State:          types.SpecStatePassed,
			CapturedOutput: "AfterSuiteOutput",
		}

		specSummary = &types.SpecSummary{
			State:          types.SpecStatePassed,
			CapturedOutput: "SpecOutput",
		}
	})

	call := func(method string, args ...interface{}) st.FakeStenographerCall {
		return st.NewFakeStenographerCall(method, args...)
	}

	beginSuite := func() {
		stenographer.Reset()
		aggregator.SpecSuiteWillBegin(ginkgoConfig2, suiteSummary2)
		aggregator.SpecSuiteWillBegin(ginkgoConfig1, suiteSummary1)
		Eventually(func() interface{} {
			return len(stenographer.Calls())
		}).Should(BeNumerically(">=", 3))
	}

	Describe("Announcing the beginning of the suite", func() {
		Context("When one of the parallel-suites starts", func() {
			BeforeEach(func() {
				aggregator.SpecSuiteWillBegin(ginkgoConfig2, suiteSummary2)
			})

			It("should be silent", func() {
				Consistently(func() interface{} { return stenographer.Calls() }).Should(BeEmpty())
			})
		})

		Context("once all of the parallel-suites have started", func() {
			BeforeEach(func() {
				aggregator.SpecSuiteWillBegin(ginkgoConfig2, suiteSummary2)
				aggregator.SpecSuiteWillBegin(ginkgoConfig1, suiteSummary1)
				Eventually(func() interface{} {
					return stenographer.Calls()
				}).Should(HaveLen(3))
			})

			It("should announce the beginning of the suite", func() {
				Ω(stenographer.Calls()).Should(HaveLen(3))
				Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSuite", suiteDescription, ginkgoConfig1.RandomSeed, true, false)))
				Ω(stenographer.Calls()[1]).Should(Equal(call("AnnounceNumberOfSpecs", 23, 30, false)))
				Ω(stenographer.Calls()[2]).Should(Equal(call("AnnounceAggregatedParallelRun", 2, false)))
			})
		})
	})

	Describe("Announcing specs and before suites", func() {
		Context("when the parallel-suites have not all started", func() {
			BeforeEach(func() {
				aggregator.BeforeSuiteDidRun(beforeSummary)
				aggregator.AfterSuiteDidRun(afterSummary)
				aggregator.SpecDidComplete(specSummary)
			})

			It("should not announce any specs", func() {
				Consistently(func() interface{} { return stenographer.Calls() }).Should(BeEmpty())
			})

			Context("when the parallel-suites subsequently start", func() {
				BeforeEach(func() {
					beginSuite()
				})

				It("should announce the specs, the before suites and the after suites", func() {
					Eventually(func() interface{} {
						return stenographer.Calls()
					}).Should(ContainElement(call("AnnounceSuccesfulSpec", specSummary)))

					Ω(stenographer.Calls()).Should(ContainElement(call("AnnounceCapturedOutput", beforeSummary.CapturedOutput)))
					Ω(stenographer.Calls()).Should(ContainElement(call("AnnounceCapturedOutput", afterSummary.CapturedOutput)))
				})
			})
		})

		Context("When the parallel-suites have all started", func() {
			BeforeEach(func() {
				beginSuite()
				stenographer.Reset()
			})

			Context("When a spec completes", func() {
				BeforeEach(func() {
					aggregator.BeforeSuiteDidRun(beforeSummary)
					aggregator.SpecDidComplete(specSummary)
					aggregator.AfterSuiteDidRun(afterSummary)
					Eventually(func() interface{} {
						return stenographer.Calls()
					}).Should(HaveLen(5))
				})

				It("should announce the captured output of the BeforeSuite", func() {
					Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceCapturedOutput", beforeSummary.CapturedOutput)))
				})

				It("should announce that the spec will run (when in verbose mode)", func() {
					Ω(stenographer.Calls()[1]).Should(Equal(call("AnnounceSpecWillRun", specSummary)))
				})

				It("should announce the captured stdout of the spec", func() {
					Ω(stenographer.Calls()[2]).Should(Equal(call("AnnounceCapturedOutput", specSummary.CapturedOutput)))
				})

				It("should announce completion", func() {
					Ω(stenographer.Calls()[3]).Should(Equal(call("AnnounceSuccesfulSpec", specSummary)))
				})

				It("should announce the captured output of the AfterSuite", func() {
					Ω(stenographer.Calls()[4]).Should(Equal(call("AnnounceCapturedOutput", afterSummary.CapturedOutput)))
				})
			})
		})
	})

	Describe("Announcing the end of the suite", func() {
		BeforeEach(func() {
			beginSuite()
			stenographer.Reset()
		})

		Context("When one of the parallel-suites ends", func() {
			BeforeEach(func() {
				aggregator.SpecSuiteDidEnd(suiteSummary2)
			})

			It("should be silent", func() {
				Consistently(func() interface{} { return stenographer.Calls() }).Should(BeEmpty())
			})

			It("should not notify the channel", func() {
				Ω(result).Should(BeEmpty())
			})
		})

		Context("once all of the parallel-suites end", func() {
			BeforeEach(func() {
				time.Sleep(200 * time.Millisecond)

				suiteSummary1.SuiteSucceeded = true
				suiteSummary1.NumberOfPassedSpecs = 15
				suiteSummary1.NumberOfFailedSpecs = 0
				suiteSummary1.NumberOfFlakedSpecs = 3
				suiteSummary2.SuiteSucceeded = false
				suiteSummary2.NumberOfPassedSpecs = 5
				suiteSummary2.NumberOfFailedSpecs = 3
				suiteSummary2.NumberOfFlakedSpecs = 4

				aggregator.SpecSuiteDidEnd(suiteSummary2)
				aggregator.SpecSuiteDidEnd(suiteSummary1)
				Eventually(func() interface{} {
					return stenographer.Calls()
				}).Should(HaveLen(2))
			})

			It("should announce the end of the suite", func() {
				compositeSummary := stenographer.Calls()[1].Args[0].(*types.SuiteSummary)

				Ω(compositeSummary.SuiteSucceeded).Should(BeFalse())
				Ω(compositeSummary.NumberOfSpecsThatWillBeRun).Should(Equal(23))
				Ω(compositeSummary.NumberOfTotalSpecs).Should(Equal(30))
				Ω(compositeSummary.NumberOfPassedSpecs).Should(Equal(20))
				Ω(compositeSummary.NumberOfFailedSpecs).Should(Equal(3))
				Ω(compositeSummary.NumberOfPendingSpecs).Should(Equal(3))
				Ω(compositeSummary.NumberOfSkippedSpecs).Should(Equal(4))
				Ω(compositeSummary.NumberOfFlakedSpecs).Should(Equal(7))
				Ω(compositeSummary.RunTime.Seconds()).Should(BeNumerically(">", 0.2))
			})
		})

		Context("when all the parallel-suites pass", func() {
			BeforeEach(func() {
				suiteSummary1.SuiteSucceeded = true
				suiteSummary2.SuiteSucceeded = true

				aggregator.SpecSuiteDidEnd(suiteSummary2)
				aggregator.SpecSuiteDidEnd(suiteSummary1)
				Eventually(func() interface{} {
					return stenographer.Calls()
				}).Should(HaveLen(2))
			})

			It("should report success", func() {
				compositeSummary := stenographer.Calls()[1].Args[0].(*types.SuiteSummary)

				Ω(compositeSummary.SuiteSucceeded).Should(BeTrue())
			})

			It("should notify the channel that it succeded", func(done Done) {
				Ω(<-result).Should(BeTrue())
				close(done)
			})
		})

		Context("when one of the parallel-suites fails", func() {
			BeforeEach(func() {
				suiteSummary1.SuiteSucceeded = true
				suiteSummary2.SuiteSucceeded = false

				aggregator.SpecSuiteDidEnd(suiteSummary2)
				aggregator.SpecSuiteDidEnd(suiteSummary1)
				Eventually(func() interface{} {
					return stenographer.Calls()
				}).Should(HaveLen(2))
			})

			It("should report failure", func() {
				compositeSummary := stenographer.Calls()[1].Args[0].(*types.SuiteSummary)

				Ω(compositeSummary.SuiteSucceeded).Should(BeFalse())
			})

			It("should notify the channel that it failed", func(done Done) {
				Ω(<-result).Should(BeFalse())
				close(done)
			})
		})
	})
})
17
vendor/github.com/onsi/ginkgo/internal/remote/fake_output_interceptor_test.go
generated
vendored
Normal file
@ -0,0 +1,17 @@
package remote_test

type fakeOutputInterceptor struct {
	DidStartInterceptingOutput bool
	DidStopInterceptingOutput  bool
	InterceptedOutput          string
}

func (interceptor *fakeOutputInterceptor) StartInterceptingOutput() error {
	interceptor.DidStartInterceptingOutput = true
	return nil
}

func (interceptor *fakeOutputInterceptor) StopInterceptingAndReturnOutput() (string, error) {
	interceptor.DidStopInterceptingOutput = true
	return interceptor.InterceptedOutput, nil
}
33
vendor/github.com/onsi/ginkgo/internal/remote/fake_poster_test.go
generated
vendored
Normal file
@ -0,0 +1,33 @@
package remote_test

import (
	"io"
	"io/ioutil"
	"net/http"
)

type post struct {
	url         string
	bodyType    string
	bodyContent []byte
}

type fakePoster struct {
	posts []post
}

func newFakePoster() *fakePoster {
	return &fakePoster{
		posts: make([]post, 0),
	}
}

func (poster *fakePoster) Post(url string, bodyType string, body io.Reader) (resp *http.Response, err error) {
	bodyContent, _ := ioutil.ReadAll(body)
	poster.posts = append(poster.posts, post{
		url:         url,
		bodyType:    bodyType,
		bodyContent: bodyContent,
	})
	return nil, nil
}
Some files were not shown because too many files have changed in this diff