update gomega and ginkgo

[#105040168] Signed-off-by: David Morhovich <dmorhovich@pivotal.io>

Committed by: David Morhovich
Parent: df4c9d8ccf
Commit: a7eb775171
486  Godeps/_workspace/src/code.google.com/p/goauth2/oauth/jwt/jwt_test.go (generated, vendored)
@@ -1,486 +0,0 @@
|
||||
// Copyright 2012 The goauth2 Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// For package documentation please see jwt.go.
|
||||
//
|
||||
package jwt
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto"
|
||||
"crypto/rand"
|
||||
"crypto/rsa"
|
||||
"crypto/sha256"
|
||||
"crypto/x509"
|
||||
"encoding/json"
|
||||
"encoding/pem"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
const (
|
||||
stdHeaderStr = `{"alg":"RS256","typ":"JWT"}`
|
||||
iss = "761326798069-r5mljlln1rd4lrbhg75efgigp36m78j5@developer.gserviceaccount.com"
|
||||
scope = "https://www.googleapis.com/auth/prediction"
|
||||
exp = 1328554385
|
||||
iat = 1328550785 // one hour before exp
|
||||
)
|
||||
|
||||
// Base64url encoded Header
|
||||
const headerEnc = "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9"
|
||||
|
||||
// Base64url encoded ClaimSet
|
||||
const claimSetEnc = "eyJpc3MiOiI3NjEzMjY3OTgwNjktcjVtbGpsbG4xcmQ0bHJiaGc3NWVmZ2lncDM2bTc4ajVAZGV2ZWxvcGVyLmdzZXJ2aWNlYWNjb3VudC5jb20iLCJzY29wZSI6Imh0dHBzOi8vd3d3Lmdvb2dsZWFwaXMuY29tL2F1dGgvcHJlZGljdGlvbiIsImF1ZCI6Imh0dHBzOi8vYWNjb3VudHMuZ29vZ2xlLmNvbS9vL29hdXRoMi90b2tlbiIsImV4cCI6MTMyODU1NDM4NSwiaWF0IjoxMzI4NTUwNzg1fQ"
|
||||
|
||||
// Base64url encoded Signature
|
||||
const sigEnc = "olukbHreNiYrgiGCTEmY3eWGeTvYDSUHYoE84Jz3BRPBSaMdZMNOn_0CYK7UHPO7OdvUofjwft1dH59UxE9GWS02pjFti1uAQoImaqjLZoTXr8qiF6O_kDa9JNoykklWlRAIwGIZkDupCS-8cTAnM_ksSymiH1coKJrLDUX_BM0x2f4iMFQzhL5vT1ll-ZipJ0lNlxb5QsyXxDYcxtHYguF12-vpv3ItgT0STfcXoWzIGQoEbhwB9SBp9JYcQ8Ygz6pYDjm0rWX9LrchmTyDArCodpKLFtutNgcIFUP9fWxvwd1C2dNw5GjLcKr9a_SAERyoJ2WnCR1_j9N0wD2o0g"
|
||||
|
||||
// Base64url encoded Token
|
||||
const tokEnc = "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiI3NjEzMjY3OTgwNjktcjVtbGpsbG4xcmQ0bHJiaGc3NWVmZ2lncDM2bTc4ajVAZGV2ZWxvcGVyLmdzZXJ2aWNlYWNjb3VudC5jb20iLCJzY29wZSI6Imh0dHBzOi8vd3d3Lmdvb2dsZWFwaXMuY29tL2F1dGgvcHJlZGljdGlvbiIsImF1ZCI6Imh0dHBzOi8vYWNjb3VudHMuZ29vZ2xlLmNvbS9vL29hdXRoMi90b2tlbiIsImV4cCI6MTMyODU1NDM4NSwiaWF0IjoxMzI4NTUwNzg1fQ.olukbHreNiYrgiGCTEmY3eWGeTvYDSUHYoE84Jz3BRPBSaMdZMNOn_0CYK7UHPO7OdvUofjwft1dH59UxE9GWS02pjFti1uAQoImaqjLZoTXr8qiF6O_kDa9JNoykklWlRAIwGIZkDupCS-8cTAnM_ksSymiH1coKJrLDUX_BM0x2f4iMFQzhL5vT1ll-ZipJ0lNlxb5QsyXxDYcxtHYguF12-vpv3ItgT0STfcXoWzIGQoEbhwB9SBp9JYcQ8Ygz6pYDjm0rWX9LrchmTyDArCodpKLFtutNgcIFUP9fWxvwd1C2dNw5GjLcKr9a_SAERyoJ2WnCR1_j9N0wD2o0g"
|
||||
|
||||
// Private key for testing
|
||||
const privateKeyPem = `-----BEGIN RSA PRIVATE KEY-----
|
||||
MIIEpAIBAAKCAQEA4ej0p7bQ7L/r4rVGUz9RN4VQWoej1Bg1mYWIDYslvKrk1gpj
|
||||
7wZgkdmM7oVK2OfgrSj/FCTkInKPqaCR0gD7K80q+mLBrN3PUkDrJQZpvRZIff3/
|
||||
xmVU1WeruQLFJjnFb2dqu0s/FY/2kWiJtBCakXvXEOb7zfbINuayL+MSsCGSdVYs
|
||||
SliS5qQpgyDap+8b5fpXZVJkq92hrcNtbkg7hCYUJczt8n9hcCTJCfUpApvaFQ18
|
||||
pe+zpyl4+WzkP66I28hniMQyUlA1hBiskT7qiouq0m8IOodhv2fagSZKjOTTU2xk
|
||||
SBc//fy3ZpsL7WqgsZS7Q+0VRK8gKfqkxg5OYQIDAQABAoIBAQDGGHzQxGKX+ANk
|
||||
nQi53v/c6632dJKYXVJC+PDAz4+bzU800Y+n/bOYsWf/kCp94XcG4Lgsdd0Gx+Zq
|
||||
HD9CI1IcqqBRR2AFscsmmX6YzPLTuEKBGMW8twaYy3utlFxElMwoUEsrSWRcCA1y
|
||||
nHSDzTt871c7nxCXHxuZ6Nm/XCL7Bg8uidRTSC1sQrQyKgTPhtQdYrPQ4WZ1A4J9
|
||||
IisyDYmZodSNZe5P+LTJ6M1SCgH8KH9ZGIxv3diMwzNNpk3kxJc9yCnja4mjiGE2
|
||||
YCNusSycU5IhZwVeCTlhQGcNeV/skfg64xkiJE34c2y2ttFbdwBTPixStGaF09nU
|
||||
Z422D40BAoGBAPvVyRRsC3BF+qZdaSMFwI1yiXY7vQw5+JZh01tD28NuYdRFzjcJ
|
||||
vzT2n8LFpj5ZfZFvSMLMVEFVMgQvWnN0O6xdXvGov6qlRUSGaH9u+TCPNnIldjMP
|
||||
B8+xTwFMqI7uQr54wBB+Poq7dVRP+0oHb0NYAwUBXoEuvYo3c/nDoRcZAoGBAOWl
|
||||
aLHjMv4CJbArzT8sPfic/8waSiLV9Ixs3Re5YREUTtnLq7LoymqB57UXJB3BNz/2
|
||||
eCueuW71avlWlRtE/wXASj5jx6y5mIrlV4nZbVuyYff0QlcG+fgb6pcJQuO9DxMI
|
||||
aqFGrWP3zye+LK87a6iR76dS9vRU+bHZpSVvGMKJAoGAFGt3TIKeQtJJyqeUWNSk
|
||||
klORNdcOMymYMIlqG+JatXQD1rR6ThgqOt8sgRyJqFCVT++YFMOAqXOBBLnaObZZ
|
||||
CFbh1fJ66BlSjoXff0W+SuOx5HuJJAa5+WtFHrPajwxeuRcNa8jwxUsB7n41wADu
|
||||
UqWWSRedVBg4Ijbw3nWwYDECgYB0pLew4z4bVuvdt+HgnJA9n0EuYowVdadpTEJg
|
||||
soBjNHV4msLzdNqbjrAqgz6M/n8Ztg8D2PNHMNDNJPVHjJwcR7duSTA6w2p/4k28
|
||||
bvvk/45Ta3XmzlxZcZSOct3O31Cw0i2XDVc018IY5be8qendDYM08icNo7vQYkRH
|
||||
504kQQKBgQDjx60zpz8ozvm1XAj0wVhi7GwXe+5lTxiLi9Fxq721WDxPMiHDW2XL
|
||||
YXfFVy/9/GIMvEiGYdmarK1NW+VhWl1DC5xhDg0kvMfxplt4tynoq1uTsQTY31Mx
|
||||
BeF5CT/JuNYk3bEBF0H/Q3VGO1/ggVS+YezdFbLWIRoMnLj6XCFEGg==
|
||||
-----END RSA PRIVATE KEY-----`
|
||||
|
||||
// Public key to go with the private key for testing
|
||||
const publicKeyPem = `-----BEGIN CERTIFICATE-----
|
||||
MIIDIzCCAgugAwIBAgIJAMfISuBQ5m+5MA0GCSqGSIb3DQEBBQUAMBUxEzARBgNV
|
||||
BAMTCnVuaXQtdGVzdHMwHhcNMTExMjA2MTYyNjAyWhcNMjExMjAzMTYyNjAyWjAV
|
||||
MRMwEQYDVQQDEwp1bml0LXRlc3RzMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
|
||||
CgKCAQEA4ej0p7bQ7L/r4rVGUz9RN4VQWoej1Bg1mYWIDYslvKrk1gpj7wZgkdmM
|
||||
7oVK2OfgrSj/FCTkInKPqaCR0gD7K80q+mLBrN3PUkDrJQZpvRZIff3/xmVU1Wer
|
||||
uQLFJjnFb2dqu0s/FY/2kWiJtBCakXvXEOb7zfbINuayL+MSsCGSdVYsSliS5qQp
|
||||
gyDap+8b5fpXZVJkq92hrcNtbkg7hCYUJczt8n9hcCTJCfUpApvaFQ18pe+zpyl4
|
||||
+WzkP66I28hniMQyUlA1hBiskT7qiouq0m8IOodhv2fagSZKjOTTU2xkSBc//fy3
|
||||
ZpsL7WqgsZS7Q+0VRK8gKfqkxg5OYQIDAQABo3YwdDAdBgNVHQ4EFgQU2RQ8yO+O
|
||||
gN8oVW2SW7RLrfYd9jEwRQYDVR0jBD4wPIAU2RQ8yO+OgN8oVW2SW7RLrfYd9jGh
|
||||
GaQXMBUxEzARBgNVBAMTCnVuaXQtdGVzdHOCCQDHyErgUOZvuTAMBgNVHRMEBTAD
|
||||
AQH/MA0GCSqGSIb3DQEBBQUAA4IBAQBRv+M/6+FiVu7KXNjFI5pSN17OcW5QUtPr
|
||||
odJMlWrJBtynn/TA1oJlYu3yV5clc/71Vr/AxuX5xGP+IXL32YDF9lTUJXG/uUGk
|
||||
+JETpKmQviPbRsvzYhz4pf6ZIOZMc3/GIcNq92ECbseGO+yAgyWUVKMmZM0HqXC9
|
||||
ovNslqe0M8C1sLm1zAR5z/h/litE7/8O2ietija3Q/qtl2TOXJdCA6sgjJX2WUql
|
||||
ybrC55ct18NKf3qhpcEkGQvFU40rVYApJpi98DiZPYFdx1oBDp/f4uZ3ojpxRVFT
|
||||
cDwcJLfNRCPUhormsY7fDS9xSyThiHsW9mjJYdcaKQkwYZ0F11yB
|
||||
-----END CERTIFICATE-----`
|
||||
|
||||
var (
|
||||
privateKeyPemBytes = []byte(privateKeyPem)
|
||||
publicKeyPemBytes = []byte(publicKeyPem)
|
||||
stdHeader = &Header{Algorithm: stdAlgorithm, Type: stdType}
|
||||
)
|
||||
|
||||
// Testing the urlEncode function.
|
||||
func TestUrlEncode(t *testing.T) {
|
||||
enc := base64Encode([]byte(stdHeaderStr))
|
||||
b := []byte(enc)
|
||||
if b[len(b)-1] == 61 {
|
||||
t.Error("TestUrlEncode: last chat == \"=\"")
|
||||
}
|
||||
if enc != headerEnc {
|
||||
t.Error("TestUrlEncode: enc != headerEnc")
|
||||
t.Errorf(" enc = %s", enc)
|
||||
t.Errorf(" headerEnc = %s", headerEnc)
|
||||
}
|
||||
}
|
||||
|
||||
// Test that the times are set properly.
|
||||
func TestClaimSetSetTimes(t *testing.T) {
|
||||
c := &ClaimSet{
|
||||
Iss: iss,
|
||||
Scope: scope,
|
||||
}
|
||||
iat := time.Unix(iat, 0)
|
||||
c.setTimes(iat)
|
||||
if c.exp.Unix() != exp {
|
||||
t.Error("TestClaimSetSetTimes: c.exp != exp")
|
||||
t.Errorf(" c.Exp = %d", c.exp.Unix())
|
||||
t.Errorf(" exp = %d", exp)
|
||||
}
|
||||
}
|
||||
|
||||
// Given a well formed ClaimSet, test for proper encoding.
|
||||
func TestClaimSetEncode(t *testing.T) {
|
||||
c := &ClaimSet{
|
||||
Iss: iss,
|
||||
Scope: scope,
|
||||
exp: time.Unix(exp, 0),
|
||||
iat: time.Unix(iat, 0),
|
||||
}
|
||||
enc := c.encode()
|
||||
re, err := base64Decode(enc)
|
||||
if err != nil {
|
||||
t.Fatalf("error decoding encoded claim set: %v", err)
|
||||
}
|
||||
|
||||
wa, err := base64Decode(claimSetEnc)
|
||||
if err != nil {
|
||||
t.Fatalf("error decoding encoded expected claim set: %v", err)
|
||||
}
|
||||
|
||||
if enc != claimSetEnc {
|
||||
t.Error("TestClaimSetEncode: enc != claimSetEnc")
|
||||
t.Errorf(" enc = %s", string(re))
|
||||
t.Errorf(" claimSetEnc = %s", string(wa))
|
||||
}
|
||||
}
|
||||
|
||||
// Test that claim sets with private claim names are encoded correctly.
|
||||
func TestClaimSetWithPrivateNameEncode(t *testing.T) {
|
||||
iatT := time.Unix(iat, 0)
|
||||
expT := time.Unix(exp, 0)
|
||||
|
||||
i, err := json.Marshal(iatT.Unix())
|
||||
if err != nil {
|
||||
t.Fatalf("error marshaling iatT value of %v: %v", iatT.Unix(), err)
|
||||
}
|
||||
iatStr := string(i)
|
||||
e, err := json.Marshal(expT.Unix())
|
||||
if err != nil {
|
||||
t.Fatalf("error marshaling expT value of %v: %v", expT.Unix(), err)
|
||||
}
|
||||
|
||||
expStr := string(e)
|
||||
|
||||
testCases := []struct {
|
||||
desc string
|
||||
input map[string]interface{}
|
||||
want string
|
||||
}{
|
||||
// Test a simple int field.
|
||||
{
|
||||
"single simple field",
|
||||
map[string]interface{}{"amount": 22},
|
||||
`{` +
|
||||
`"iss":"` + iss + `",` +
|
||||
`"scope":"` + scope + `",` +
|
||||
`"aud":"` + stdAud + `",` +
|
||||
`"exp":` + expStr + `,` +
|
||||
`"iat":` + iatStr + `,` +
|
||||
`"amount":22` +
|
||||
`}`,
|
||||
},
|
||||
{
|
||||
"multiple simple fields",
|
||||
map[string]interface{}{"tracking_code": "axZf", "amount": 22},
|
||||
`{` +
|
||||
`"iss":"` + iss + `",` +
|
||||
`"scope":"` + scope + `",` +
|
||||
`"aud":"` + stdAud + `",` +
|
||||
`"exp":` + expStr + `,` +
|
||||
`"iat":` + iatStr + `,` +
|
||||
`"amount":22,` +
|
||||
`"tracking_code":"axZf"` +
|
||||
`}`,
|
||||
},
|
||||
{
|
||||
"nested struct fields",
|
||||
map[string]interface{}{
|
||||
"tracking_code": "axZf",
|
||||
"purchase": struct {
|
||||
Description string `json:"desc"`
|
||||
Quantity int32 `json:"q"`
|
||||
Time int64 `json:"t"`
|
||||
}{
|
||||
"toaster",
|
||||
5,
|
||||
iat,
|
||||
},
|
||||
},
|
||||
`{` +
|
||||
`"iss":"` + iss + `",` +
|
||||
`"scope":"` + scope + `",` +
|
||||
`"aud":"` + stdAud + `",` +
|
||||
`"exp":` + expStr + `,` +
|
||||
`"iat":` + iatStr + `,` +
|
||||
`"purchase":{"desc":"toaster","q":5,"t":` + iatStr + `},` +
|
||||
`"tracking_code":"axZf"` +
|
||||
`}`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, testCase := range testCases {
|
||||
c := &ClaimSet{
|
||||
Iss: iss,
|
||||
Scope: scope,
|
||||
Aud: stdAud,
|
||||
iat: iatT,
|
||||
exp: expT,
|
||||
PrivateClaims: testCase.input,
|
||||
}
|
||||
cJSON, err := base64Decode(c.encode())
|
||||
if err != nil {
|
||||
t.Fatalf("error decoding claim set: %v", err)
|
||||
}
|
||||
if string(cJSON) != testCase.want {
|
||||
t.Errorf("TestClaimSetWithPrivateNameEncode: enc != want in case %s", testCase.desc)
|
||||
t.Errorf(" enc = %s", cJSON)
|
||||
t.Errorf(" want = %s", testCase.want)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Test the NewToken constructor.
|
||||
func TestNewToken(t *testing.T) {
|
||||
tok := NewToken(iss, scope, privateKeyPemBytes)
|
||||
if tok.ClaimSet.Iss != iss {
|
||||
t.Error("TestNewToken: tok.ClaimSet.Iss != iss")
|
||||
t.Errorf(" tok.ClaimSet.Iss = %s", tok.ClaimSet.Iss)
|
||||
t.Errorf(" iss = %s", iss)
|
||||
}
|
||||
if tok.ClaimSet.Scope != scope {
|
||||
t.Error("TestNewToken: tok.ClaimSet.Scope != scope")
|
||||
t.Errorf(" tok.ClaimSet.Scope = %s", tok.ClaimSet.Scope)
|
||||
t.Errorf(" scope = %s", scope)
|
||||
}
|
||||
if tok.ClaimSet.Aud != stdAud {
|
||||
t.Error("TestNewToken: tok.ClaimSet.Aud != stdAud")
|
||||
t.Errorf(" tok.ClaimSet.Aud = %s", tok.ClaimSet.Aud)
|
||||
t.Errorf(" stdAud = %s", stdAud)
|
||||
}
|
||||
if !bytes.Equal(tok.Key, privateKeyPemBytes) {
|
||||
t.Error("TestNewToken: tok.Key != privateKeyPemBytes")
|
||||
t.Errorf(" tok.Key = %s", tok.Key)
|
||||
t.Errorf(" privateKeyPemBytes = %s", privateKeyPemBytes)
|
||||
}
|
||||
}
|
||||
|
||||
// Make sure the private key parsing functions work.
|
||||
func TestParsePrivateKey(t *testing.T) {
|
||||
tok := &Token{
|
||||
Key: privateKeyPemBytes,
|
||||
}
|
||||
err := tok.parsePrivateKey()
|
||||
if err != nil {
|
||||
t.Errorf("TestParsePrivateKey:tok.parsePrivateKey: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
// Test that the token signature generated matches the golden standard.
|
||||
func TestTokenSign(t *testing.T) {
|
||||
tok := &Token{
|
||||
Key: privateKeyPemBytes,
|
||||
claim: claimSetEnc,
|
||||
header: headerEnc,
|
||||
}
|
||||
err := tok.parsePrivateKey()
|
||||
if err != nil {
|
||||
t.Errorf("TestTokenSign:tok.parsePrivateKey: %v", err)
|
||||
}
|
||||
err = tok.sign()
|
||||
if err != nil {
|
||||
t.Errorf("TestTokenSign:tok.sign: %v", err)
|
||||
}
|
||||
if tok.sig != sigEnc {
|
||||
t.Error("TestTokenSign: tok.sig != sigEnc")
|
||||
t.Errorf(" tok.sig = %s", tok.sig)
|
||||
t.Errorf(" sigEnc = %s", sigEnc)
|
||||
}
|
||||
}
|
||||
|
||||
// Test that the token expiration function is working.
|
||||
func TestTokenExpired(t *testing.T) {
|
||||
c := &ClaimSet{}
|
||||
tok := &Token{
|
||||
ClaimSet: c,
|
||||
}
|
||||
now := time.Now()
|
||||
c.setTimes(now)
|
||||
if tok.Expired() != false {
|
||||
t.Error("TestTokenExpired: tok.Expired != false")
|
||||
}
|
||||
// Set the times as if they were set 2 hours ago.
|
||||
c.setTimes(now.Add(-2 * time.Hour))
|
||||
if tok.Expired() != true {
|
||||
t.Error("TestTokenExpired: tok.Expired != true")
|
||||
}
|
||||
}
|
||||
|
||||
// Given a well formed Token, test for proper encoding.
|
||||
func TestTokenEncode(t *testing.T) {
|
||||
c := &ClaimSet{
|
||||
Iss: iss,
|
||||
Scope: scope,
|
||||
exp: time.Unix(exp, 0),
|
||||
iat: time.Unix(iat, 0),
|
||||
}
|
||||
tok := &Token{
|
||||
ClaimSet: c,
|
||||
Header: stdHeader,
|
||||
Key: privateKeyPemBytes,
|
||||
}
|
||||
enc, err := tok.Encode()
|
||||
if err != nil {
|
||||
t.Errorf("TestTokenEncode:tok.Assertion: %v", err)
|
||||
}
|
||||
if enc != tokEnc {
|
||||
t.Error("TestTokenEncode: enc != tokEnc")
|
||||
t.Errorf(" enc = %s", enc)
|
||||
t.Errorf(" tokEnc = %s", tokEnc)
|
||||
}
|
||||
}
|
||||
|
||||
// Given a well formed Token we should get back a well formed request.
|
||||
func TestBuildRequest(t *testing.T) {
|
||||
c := &ClaimSet{
|
||||
Iss: iss,
|
||||
Scope: scope,
|
||||
exp: time.Unix(exp, 0),
|
||||
iat: time.Unix(iat, 0),
|
||||
}
|
||||
tok := &Token{
|
||||
ClaimSet: c,
|
||||
Header: stdHeader,
|
||||
Key: privateKeyPemBytes,
|
||||
}
|
||||
u, v, err := tok.buildRequest()
|
||||
if err != nil {
|
||||
t.Errorf("TestBuildRequest:BuildRequest: %v", err)
|
||||
}
|
||||
if u != c.Aud {
|
||||
t.Error("TestBuildRequest: u != c.Aud")
|
||||
t.Errorf(" u = %s", u)
|
||||
t.Errorf(" c.Aud = %s", c.Aud)
|
||||
}
|
||||
if v.Get("grant_type") != stdGrantType {
|
||||
t.Error("TestBuildRequest: grant_type != stdGrantType")
|
||||
t.Errorf(" grant_type = %s", v.Get("grant_type"))
|
||||
t.Errorf(" stdGrantType = %s", stdGrantType)
|
||||
}
|
||||
if v.Get("assertion") != tokEnc {
|
||||
t.Error("TestBuildRequest: assertion != tokEnc")
|
||||
t.Errorf(" assertion = %s", v.Get("assertion"))
|
||||
t.Errorf(" tokEnc = %s", tokEnc)
|
||||
}
|
||||
}
|
||||
|
||||
// Given a well formed access request response we should get back a oauth.Token.
|
||||
func TestHandleResponse(t *testing.T) {
|
||||
rb := &respBody{
|
||||
Access: "1/8xbJqaOZXSUZbHLl5EOtu1pxz3fmmetKx9W8CV4t79M",
|
||||
Type: "Bearer",
|
||||
ExpiresIn: 3600,
|
||||
}
|
||||
b, err := json.Marshal(rb)
|
||||
if err != nil {
|
||||
t.Errorf("TestHandleResponse:json.Marshal: %v", err)
|
||||
}
|
||||
r := &http.Response{
|
||||
Status: "200 OK",
|
||||
StatusCode: 200,
|
||||
Body: ioutil.NopCloser(bytes.NewReader(b)),
|
||||
}
|
||||
o, err := handleResponse(r)
|
||||
if err != nil {
|
||||
t.Errorf("TestHandleResponse:handleResponse: %v", err)
|
||||
}
|
||||
if o.AccessToken != rb.Access {
|
||||
t.Error("TestHandleResponse: o.AccessToken != rb.Access")
|
||||
t.Errorf(" o.AccessToken = %s", o.AccessToken)
|
||||
t.Errorf(" rb.Access = %s", rb.Access)
|
||||
}
|
||||
if o.Expired() {
|
||||
t.Error("TestHandleResponse: o.Expired == true")
|
||||
}
|
||||
}
|
||||
|
||||
// passthrough signature for test
|
||||
type FakeSigner struct{}
|
||||
|
||||
func (f FakeSigner) Sign(tok *Token) ([]byte, []byte, error) {
|
||||
block, _ := pem.Decode(privateKeyPemBytes)
|
||||
pKey, _ := x509.ParsePKCS1PrivateKey(block.Bytes)
|
||||
ss := headerEnc + "." + claimSetEnc
|
||||
h := sha256.New()
|
||||
h.Write([]byte(ss))
|
||||
b, _ := rsa.SignPKCS1v15(rand.Reader, pKey, crypto.SHA256, h.Sum(nil))
|
||||
return []byte(ss), b, nil
|
||||
}
|
||||
|
||||
// Given an external signer, get back a valid and signed JWT
|
||||
func TestExternalSigner(t *testing.T) {
|
||||
tok := NewSignerToken(iss, scope, FakeSigner{})
|
||||
enc, _ := tok.Encode()
|
||||
if enc != tokEnc {
|
||||
t.Errorf("TestExternalSigner: enc != tokEnc")
|
||||
t.Errorf(" enc = %s", enc)
|
||||
t.Errorf(" tokEnc = %s", tokEnc)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHandleResponseWithNewExpiry(t *testing.T) {
|
||||
rb := &respBody{
|
||||
IdToken: tokEnc,
|
||||
}
|
||||
b, err := json.Marshal(rb)
|
||||
if err != nil {
|
||||
t.Errorf("TestHandleResponse:json.Marshal: %v", err)
|
||||
}
|
||||
r := &http.Response{
|
||||
Status: "200 OK",
|
||||
StatusCode: 200,
|
||||
Body: ioutil.NopCloser(bytes.NewReader(b)),
|
||||
}
|
||||
o, err := handleResponse(r)
|
||||
if err != nil {
|
||||
t.Errorf("TestHandleResponse:handleResponse: %v", err)
|
||||
}
|
||||
if o.Expiry != time.Unix(exp, 0) {
|
||||
t.Error("TestHandleResponse: o.Expiry != exp")
|
||||
t.Errorf(" o.Expiry = %s", o.Expiry)
|
||||
t.Errorf(" exp = %s", time.Unix(exp, 0))
|
||||
}
|
||||
}
|
||||
|
||||
// Placeholder for future Assert tests.
|
||||
func TestAssert(t *testing.T) {
|
||||
// Since this method makes a call to BuildRequest, an http.Client, and
|
||||
// finally HandleResponse there is not much more to test. This is here
|
||||
// as a placeholder if that changes.
|
||||
}
|
||||
|
||||
// Benchmark for the end-to-end encoding of a well formed token.
|
||||
func BenchmarkTokenEncode(b *testing.B) {
|
||||
b.StopTimer()
|
||||
c := &ClaimSet{
|
||||
Iss: iss,
|
||||
Scope: scope,
|
||||
exp: time.Unix(exp, 0),
|
||||
iat: time.Unix(iat, 0),
|
||||
}
|
||||
tok := &Token{
|
||||
ClaimSet: c,
|
||||
Key: privateKeyPemBytes,
|
||||
}
|
||||
b.StartTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
tok.Encode()
|
||||
}
|
||||
}
|
||||
236  Godeps/_workspace/src/code.google.com/p/goauth2/oauth/oauth_test.go (generated, vendored)
@@ -1,236 +0,0 @@
|
||||
// Copyright 2011 The goauth2 Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package oauth
|
||||
|
||||
import (
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
var requests = []struct {
|
||||
path, query, auth string // request
|
||||
contenttype, body string // response
|
||||
}{
|
||||
{
|
||||
path: "/token",
|
||||
query: "grant_type=authorization_code&code=c0d3&client_id=cl13nt1d",
|
||||
contenttype: "application/json",
|
||||
auth: "Basic Y2wxM250MWQ6czNjcjN0",
|
||||
body: `
|
||||
{
|
||||
"access_token":"token1",
|
||||
"refresh_token":"refreshtoken1",
|
||||
"id_token":"idtoken1",
|
||||
"expires_in":3600
|
||||
}
|
||||
`,
|
||||
},
|
||||
{path: "/secure", auth: "Bearer token1", body: "first payload"},
|
||||
{
|
||||
path: "/token",
|
||||
query: "grant_type=refresh_token&refresh_token=refreshtoken1&client_id=cl13nt1d",
|
||||
contenttype: "application/json",
|
||||
auth: "Basic Y2wxM250MWQ6czNjcjN0",
|
||||
body: `
|
||||
{
|
||||
"access_token":"token2",
|
||||
"refresh_token":"refreshtoken2",
|
||||
"id_token":"idtoken2",
|
||||
"expires_in":3600
|
||||
}
|
||||
`,
|
||||
},
|
||||
{path: "/secure", auth: "Bearer token2", body: "second payload"},
|
||||
{
|
||||
path: "/token",
|
||||
query: "grant_type=refresh_token&refresh_token=refreshtoken2&client_id=cl13nt1d",
|
||||
contenttype: "application/x-www-form-urlencoded",
|
||||
body: "access_token=token3&refresh_token=refreshtoken3&id_token=idtoken3&expires_in=3600",
|
||||
auth: "Basic Y2wxM250MWQ6czNjcjN0",
|
||||
},
|
||||
{path: "/secure", auth: "Bearer token3", body: "third payload"},
|
||||
{
|
||||
path: "/token",
|
||||
query: "grant_type=client_credentials&client_id=cl13nt1d",
|
||||
contenttype: "application/json",
|
||||
auth: "Basic Y2wxM250MWQ6czNjcjN0",
|
||||
body: `
|
||||
{
|
||||
"access_token":"token4",
|
||||
"expires_in":3600
|
||||
}
|
||||
`,
|
||||
},
|
||||
{path: "/secure", auth: "Bearer token4", body: "fourth payload"},
|
||||
}
|
||||
|
||||
func TestOAuth(t *testing.T) {
|
||||
// Set up test server.
|
||||
n := 0
|
||||
handler := func(w http.ResponseWriter, r *http.Request) {
|
||||
if n >= len(requests) {
|
||||
t.Errorf("too many requests: %d", n)
|
||||
return
|
||||
}
|
||||
req := requests[n]
|
||||
n++
|
||||
|
||||
// Check request.
|
||||
if g, w := r.URL.Path, req.path; g != w {
|
||||
t.Errorf("request[%d] got path %s, want %s", n, g, w)
|
||||
}
|
||||
want, _ := url.ParseQuery(req.query)
|
||||
for k := range want {
|
||||
if g, w := r.FormValue(k), want.Get(k); g != w {
|
||||
t.Errorf("query[%s] = %s, want %s", k, g, w)
|
||||
}
|
||||
}
|
||||
if g, w := r.Header.Get("Authorization"), req.auth; w != "" && g != w {
|
||||
t.Errorf("Authorization: %v, want %v", g, w)
|
||||
}
|
||||
|
||||
// Send response.
|
||||
w.Header().Set("Content-Type", req.contenttype)
|
||||
io.WriteString(w, req.body)
|
||||
}
|
||||
server := httptest.NewServer(http.HandlerFunc(handler))
|
||||
defer server.Close()
|
||||
|
||||
config := &Config{
|
||||
ClientId: "cl13nt1d",
|
||||
ClientSecret: "s3cr3t",
|
||||
Scope: "https://example.net/scope",
|
||||
AuthURL: server.URL + "/auth",
|
||||
TokenURL: server.URL + "/token",
|
||||
}
|
||||
|
||||
// TODO(adg): test AuthCodeURL
|
||||
|
||||
transport := &Transport{Config: config}
|
||||
_, err := transport.Exchange("c0d3")
|
||||
if err != nil {
|
||||
t.Fatalf("Exchange: %v", err)
|
||||
}
|
||||
checkToken(t, transport.Token, "token1", "refreshtoken1", "idtoken1")
|
||||
|
||||
c := transport.Client()
|
||||
resp, err := c.Get(server.URL + "/secure")
|
||||
if err != nil {
|
||||
t.Fatalf("Get: %v", err)
|
||||
}
|
||||
checkBody(t, resp, "first payload")
|
||||
|
||||
// test automatic refresh
|
||||
transport.Expiry = time.Now().Add(-time.Hour)
|
||||
resp, err = c.Get(server.URL + "/secure")
|
||||
if err != nil {
|
||||
t.Fatalf("Get: %v", err)
|
||||
}
|
||||
checkBody(t, resp, "second payload")
|
||||
checkToken(t, transport.Token, "token2", "refreshtoken2", "idtoken2")
|
||||
|
||||
// refresh one more time, but get URL-encoded token instead of JSON
|
||||
transport.Expiry = time.Now().Add(-time.Hour)
|
||||
resp, err = c.Get(server.URL + "/secure")
|
||||
if err != nil {
|
||||
t.Fatalf("Get: %v", err)
|
||||
}
|
||||
checkBody(t, resp, "third payload")
|
||||
checkToken(t, transport.Token, "token3", "refreshtoken3", "idtoken3")
|
||||
|
||||
transport.Token = &Token{}
|
||||
err = transport.AuthenticateClient()
|
||||
if err != nil {
|
||||
t.Fatalf("AuthenticateClient: %v", err)
|
||||
}
|
||||
checkToken(t, transport.Token, "token4", "", "")
|
||||
resp, err = c.Get(server.URL + "/secure")
|
||||
if err != nil {
|
||||
t.Fatalf("Get: %v", err)
|
||||
}
|
||||
checkBody(t, resp, "fourth payload")
|
||||
}
|
||||
|
||||
func checkToken(t *testing.T, tok *Token, access, refresh, id string) {
|
||||
if g, w := tok.AccessToken, access; g != w {
|
||||
t.Errorf("AccessToken = %q, want %q", g, w)
|
||||
}
|
||||
if g, w := tok.RefreshToken, refresh; g != w {
|
||||
t.Errorf("RefreshToken = %q, want %q", g, w)
|
||||
}
|
||||
if g, w := tok.Extra["id_token"], id; g != w {
|
||||
t.Errorf("Extra['id_token'] = %q, want %q", g, w)
|
||||
}
|
||||
if tok.Expiry.IsZero() {
|
||||
t.Errorf("Expiry is zero; want ~1 hour")
|
||||
} else {
|
||||
exp := tok.Expiry.Sub(time.Now())
|
||||
const slop = 3 * time.Second // time moving during test
|
||||
if (time.Hour-slop) > exp || exp > time.Hour {
|
||||
t.Errorf("Expiry = %v, want ~1 hour", exp)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func checkBody(t *testing.T, r *http.Response, body string) {
|
||||
b, err := ioutil.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
t.Errorf("reading reponse body: %v, want %q", err, body)
|
||||
}
|
||||
if g, w := string(b), body; g != w {
|
||||
t.Errorf("request body mismatch: got %q, want %q", g, w)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCachePermissions(t *testing.T) {
|
||||
if runtime.GOOS == "windows" {
|
||||
// Windows doesn't support file mode bits.
|
||||
return
|
||||
}
|
||||
|
||||
td, err := ioutil.TempDir("", "oauth-test")
|
||||
if err != nil {
|
||||
t.Fatalf("ioutil.TempDir: %v", err)
|
||||
}
|
||||
defer os.RemoveAll(td)
|
||||
tempFile := filepath.Join(td, "cache-file")
|
||||
|
||||
cf := CacheFile(tempFile)
|
||||
if err := cf.PutToken(new(Token)); err != nil {
|
||||
t.Fatalf("PutToken: %v", err)
|
||||
}
|
||||
fi, err := os.Stat(tempFile)
|
||||
if err != nil {
|
||||
t.Fatalf("os.Stat: %v", err)
|
||||
}
|
||||
if fi.Mode()&0077 != 0 {
|
||||
t.Errorf("Created cache file has mode %#o, want non-accessible to group+other", fi.Mode())
|
||||
}
|
||||
}
|
||||
|
||||
func TestTokenExpired(t *testing.T) {
|
||||
tests := []struct {
|
||||
token Token
|
||||
expired bool
|
||||
}{
|
||||
{Token{AccessToken: "foo"}, false},
|
||||
{Token{AccessToken: ""}, true},
|
||||
{Token{AccessToken: "foo", Expiry: time.Now().Add(-1 * time.Hour)}, true},
|
||||
{Token{AccessToken: "foo", Expiry: time.Now().Add(1 * time.Hour)}, false},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
if got := tt.token.Expired(); got != tt.expired {
|
||||
t.Errorf("token %+v Expired = %v; want %v", tt.token, got, !got)
|
||||
}
|
||||
}
|
||||
}
|
||||
45  Godeps/_workspace/src/github.com/blang/semver/json_test.go (generated, vendored)
@@ -1,45 +0,0 @@
package semver

import (
	"encoding/json"
	"strconv"
	"testing"
)

func TestJSONMarshal(t *testing.T) {
	versionString := "3.1.4-alpha.1.5.9+build.2.6.5"
	v, err := Parse(versionString)
	if err != nil {
		t.Fatal(err)
	}

	versionJSON, err := json.Marshal(v)
	if err != nil {
		t.Fatal(err)
	}

	quotedVersionString := strconv.Quote(versionString)

	if string(versionJSON) != quotedVersionString {
		t.Fatalf("JSON marshaled semantic version not equal: expected %q, got %q", quotedVersionString, string(versionJSON))
	}
}

func TestJSONUnmarshal(t *testing.T) {
	versionString := "3.1.4-alpha.1.5.9+build.2.6.5"
	quotedVersionString := strconv.Quote(versionString)

	var v Version
	if err := json.Unmarshal([]byte(quotedVersionString), &v); err != nil {
		t.Fatal(err)
	}

	if v.String() != versionString {
		t.Fatalf("JSON unmarshaled semantic version not equal: expected %q, got %q", versionString, v.String())
	}

	badVersionString := strconv.Quote("3.1.4.1.5.9.2.6.5-other-digits-of-pi")
	if err := json.Unmarshal([]byte(badVersionString), &v); err == nil {
		t.Fatal("expected JSON unmarshal error, got nil")
	}
}
417  Godeps/_workspace/src/github.com/blang/semver/semver_test.go (generated, vendored)
@@ -1,417 +0,0 @@
|
||||
package semver
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func prstr(s string) PRVersion {
|
||||
return PRVersion{s, 0, false}
|
||||
}
|
||||
|
||||
func prnum(i uint64) PRVersion {
|
||||
return PRVersion{"", i, true}
|
||||
}
|
||||
|
||||
type formatTest struct {
|
||||
v Version
|
||||
result string
|
||||
}
|
||||
|
||||
var formatTests = []formatTest{
|
||||
{Version{1, 2, 3, nil, nil}, "1.2.3"},
|
||||
{Version{0, 0, 1, nil, nil}, "0.0.1"},
|
||||
{Version{0, 0, 1, []PRVersion{prstr("alpha"), prstr("preview")}, []string{"123", "456"}}, "0.0.1-alpha.preview+123.456"},
|
||||
{Version{1, 2, 3, []PRVersion{prstr("alpha"), prnum(1)}, []string{"123", "456"}}, "1.2.3-alpha.1+123.456"},
|
||||
{Version{1, 2, 3, []PRVersion{prstr("alpha"), prnum(1)}, nil}, "1.2.3-alpha.1"},
|
||||
{Version{1, 2, 3, nil, []string{"123", "456"}}, "1.2.3+123.456"},
|
||||
// Prereleases and build metadata hyphens
|
||||
{Version{1, 2, 3, []PRVersion{prstr("alpha"), prstr("b-eta")}, []string{"123", "b-uild"}}, "1.2.3-alpha.b-eta+123.b-uild"},
|
||||
{Version{1, 2, 3, nil, []string{"123", "b-uild"}}, "1.2.3+123.b-uild"},
|
||||
{Version{1, 2, 3, []PRVersion{prstr("alpha"), prstr("b-eta")}, nil}, "1.2.3-alpha.b-eta"},
|
||||
}
|
||||
|
||||
func TestStringer(t *testing.T) {
|
||||
for _, test := range formatTests {
|
||||
if res := test.v.String(); res != test.result {
|
||||
t.Errorf("Stringer, expected %q but got %q", test.result, res)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestParse(t *testing.T) {
|
||||
for _, test := range formatTests {
|
||||
if v, err := Parse(test.result); err != nil {
|
||||
t.Errorf("Error parsing %q: %q", test.result, err)
|
||||
} else if comp := v.Compare(test.v); comp != 0 {
|
||||
t.Errorf("Parsing, expected %q but got %q, comp: %d ", test.v, v, comp)
|
||||
} else if err := v.Validate(); err != nil {
|
||||
t.Errorf("Error validating parsed version %q: %q", test.v, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestMustParse(t *testing.T) {
|
||||
_ = MustParse("32.2.1-alpha")
|
||||
}
|
||||
|
||||
func TestMustParse_panic(t *testing.T) {
|
||||
defer func() {
|
||||
if recover() == nil {
|
||||
t.Errorf("Should have panicked")
|
||||
}
|
||||
}()
|
||||
_ = MustParse("invalid version")
|
||||
}
|
||||
|
||||
func TestValidate(t *testing.T) {
|
||||
for _, test := range formatTests {
|
||||
if err := test.v.Validate(); err != nil {
|
||||
t.Errorf("Error validating %q: %q", test.v, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type compareTest struct {
|
||||
v1 Version
|
||||
v2 Version
|
||||
result int
|
||||
}
|
||||
|
||||
var compareTests = []compareTest{
|
||||
{Version{1, 0, 0, nil, nil}, Version{1, 0, 0, nil, nil}, 0},
|
||||
{Version{2, 0, 0, nil, nil}, Version{1, 0, 0, nil, nil}, 1},
|
||||
{Version{0, 1, 0, nil, nil}, Version{0, 1, 0, nil, nil}, 0},
|
||||
{Version{0, 2, 0, nil, nil}, Version{0, 1, 0, nil, nil}, 1},
|
||||
{Version{0, 0, 1, nil, nil}, Version{0, 0, 1, nil, nil}, 0},
|
||||
{Version{0, 0, 2, nil, nil}, Version{0, 0, 1, nil, nil}, 1},
|
||||
{Version{1, 2, 3, nil, nil}, Version{1, 2, 3, nil, nil}, 0},
|
||||
{Version{2, 2, 4, nil, nil}, Version{1, 2, 4, nil, nil}, 1},
|
||||
{Version{1, 3, 3, nil, nil}, Version{1, 2, 3, nil, nil}, 1},
|
||||
{Version{1, 2, 4, nil, nil}, Version{1, 2, 3, nil, nil}, 1},
|
||||
|
||||
// Spec Examples #11
|
||||
{Version{1, 0, 0, nil, nil}, Version{2, 0, 0, nil, nil}, -1},
|
||||
{Version{2, 0, 0, nil, nil}, Version{2, 1, 0, nil, nil}, -1},
|
||||
{Version{2, 1, 0, nil, nil}, Version{2, 1, 1, nil, nil}, -1},
|
||||
|
||||
// Spec Examples #9
|
||||
{Version{1, 0, 0, nil, nil}, Version{1, 0, 0, []PRVersion{prstr("alpha")}, nil}, 1},
|
||||
{Version{1, 0, 0, []PRVersion{prstr("alpha")}, nil}, Version{1, 0, 0, []PRVersion{prstr("alpha"), prnum(1)}, nil}, -1},
|
||||
{Version{1, 0, 0, []PRVersion{prstr("alpha"), prnum(1)}, nil}, Version{1, 0, 0, []PRVersion{prstr("alpha"), prstr("beta")}, nil}, -1},
|
||||
{Version{1, 0, 0, []PRVersion{prstr("alpha"), prstr("beta")}, nil}, Version{1, 0, 0, []PRVersion{prstr("beta")}, nil}, -1},
|
||||
{Version{1, 0, 0, []PRVersion{prstr("beta")}, nil}, Version{1, 0, 0, []PRVersion{prstr("beta"), prnum(2)}, nil}, -1},
|
||||
{Version{1, 0, 0, []PRVersion{prstr("beta"), prnum(2)}, nil}, Version{1, 0, 0, []PRVersion{prstr("beta"), prnum(11)}, nil}, -1},
|
||||
{Version{1, 0, 0, []PRVersion{prstr("beta"), prnum(11)}, nil}, Version{1, 0, 0, []PRVersion{prstr("rc"), prnum(1)}, nil}, -1},
|
||||
{Version{1, 0, 0, []PRVersion{prstr("rc"), prnum(1)}, nil}, Version{1, 0, 0, nil, nil}, -1},
|
||||
|
||||
// Ignore Build metadata
|
||||
{Version{1, 0, 0, nil, []string{"1", "2", "3"}}, Version{1, 0, 0, nil, nil}, 0},
|
||||
}
|
||||
|
||||
func TestCompare(t *testing.T) {
|
||||
for _, test := range compareTests {
|
||||
if res := test.v1.Compare(test.v2); res != test.result {
|
||||
t.Errorf("Comparing %q : %q, expected %d but got %d", test.v1, test.v2, test.result, res)
|
||||
}
|
||||
//Test counterpart
|
||||
if res := test.v2.Compare(test.v1); res != -test.result {
|
||||
t.Errorf("Comparing %q : %q, expected %d but got %d", test.v2, test.v1, -test.result, res)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type wrongformatTest struct {
|
||||
v *Version
|
||||
str string
|
||||
}
|
||||
|
||||
var wrongformatTests = []wrongformatTest{
|
||||
{nil, ""},
|
||||
{nil, "."},
|
||||
{nil, "1."},
|
||||
{nil, ".1"},
|
||||
{nil, "a.b.c"},
|
||||
{nil, "1.a.b"},
|
||||
{nil, "1.1.a"},
|
||||
{nil, "1.a.1"},
|
||||
{nil, "a.1.1"},
|
||||
{nil, ".."},
|
||||
{nil, "1.."},
|
||||
{nil, "1.1."},
|
||||
{nil, "1..1"},
|
||||
{nil, "1.1.+123"},
|
||||
{nil, "1.1.-beta"},
|
||||
{nil, "-1.1.1"},
|
||||
{nil, "1.-1.1"},
|
||||
{nil, "1.1.-1"},
|
||||
// giant numbers
|
||||
{nil, "20000000000000000000.1.1"},
|
||||
{nil, "1.20000000000000000000.1"},
|
||||
{nil, "1.1.20000000000000000000"},
|
||||
{nil, "1.1.1-20000000000000000000"},
|
||||
// Leading zeroes
|
||||
{nil, "01.1.1"},
|
||||
{nil, "001.1.1"},
|
||||
{nil, "1.01.1"},
|
||||
{nil, "1.001.1"},
|
||||
{nil, "1.1.01"},
|
||||
{nil, "1.1.001"},
|
||||
{nil, "1.1.1-01"},
|
||||
{nil, "1.1.1-001"},
|
||||
{nil, "1.1.1-beta.01"},
|
||||
{nil, "1.1.1-beta.001"},
|
||||
{&Version{0, 0, 0, []PRVersion{prstr("!")}, nil}, "0.0.0-!"},
|
||||
{&Version{0, 0, 0, nil, []string{"!"}}, "0.0.0+!"},
|
||||
// empty prversion
|
||||
{&Version{0, 0, 0, []PRVersion{prstr(""), prstr("alpha")}, nil}, "0.0.0-.alpha"},
|
||||
// empty build meta data
|
||||
{&Version{0, 0, 0, []PRVersion{prstr("alpha")}, []string{""}}, "0.0.0-alpha+"},
|
||||
{&Version{0, 0, 0, []PRVersion{prstr("alpha")}, []string{"test", ""}}, "0.0.0-alpha+test."},
|
||||
}
|
||||
|
||||
func TestWrongFormat(t *testing.T) {
|
||||
for _, test := range wrongformatTests {
|
||||
|
||||
if res, err := Parse(test.str); err == nil {
|
||||
t.Errorf("Parsing wrong format version %q, expected error but got %q", test.str, res)
|
||||
}
|
||||
|
||||
if test.v != nil {
|
||||
if err := test.v.Validate(); err == nil {
|
||||
t.Errorf("Validating wrong format version %q (%q), expected error", test.v, test.str)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestCompareHelper(t *testing.T) {
|
||||
v := Version{1, 0, 0, []PRVersion{prstr("alpha")}, nil}
|
||||
v1 := Version{1, 0, 0, nil, nil}
|
||||
if !v.EQ(v) {
|
||||
t.Errorf("%q should be equal to %q", v, v)
|
||||
}
|
||||
if !v.Equals(v) {
|
||||
t.Errorf("%q should be equal to %q", v, v)
|
||||
}
|
||||
if !v1.NE(v) {
|
||||
t.Errorf("%q should not be equal to %q", v1, v)
|
||||
}
|
||||
if !v.GTE(v) {
|
||||
t.Errorf("%q should be greater than or equal to %q", v, v)
|
||||
}
|
||||
if !v.LTE(v) {
|
||||
t.Errorf("%q should be less than or equal to %q", v, v)
|
||||
}
|
||||
if !v.LT(v1) {
|
||||
t.Errorf("%q should be less than %q", v, v1)
|
||||
}
|
||||
if !v.LTE(v1) {
|
||||
t.Errorf("%q should be less than or equal %q", v, v1)
|
||||
}
|
||||
if !v.LE(v1) {
|
||||
t.Errorf("%q should be less than or equal %q", v, v1)
|
||||
}
|
||||
if !v1.GT(v) {
|
||||
t.Errorf("%q should be greater than %q", v1, v)
|
||||
}
|
||||
if !v1.GTE(v) {
|
||||
t.Errorf("%q should be greater than or equal %q", v1, v)
|
||||
}
|
||||
if !v1.GE(v) {
|
||||
t.Errorf("%q should be greater than or equal %q", v1, v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestPreReleaseVersions(t *testing.T) {
|
||||
p1, err := NewPRVersion("123")
|
||||
if !p1.IsNumeric() {
|
||||
t.Errorf("Expected numeric prversion, got %q", p1)
|
||||
}
|
||||
if p1.VersionNum != 123 {
|
||||
t.Error("Wrong prversion number")
|
||||
}
|
||||
if err != nil {
|
||||
t.Errorf("Not expected error %q", err)
|
||||
}
|
||||
p2, err := NewPRVersion("alpha")
|
||||
if p2.IsNumeric() {
|
||||
t.Errorf("Expected non-numeric prversion, got %q", p2)
|
||||
}
|
||||
if p2.VersionStr != "alpha" {
|
||||
t.Error("Wrong prversion string")
|
||||
}
|
||||
if err != nil {
|
||||
t.Errorf("Not expected error %q", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestBuildMetaDataVersions(t *testing.T) {
|
||||
_, err := NewBuildVersion("123")
|
||||
if err != nil {
|
||||
t.Errorf("Unexpected error %q", err)
|
||||
}
|
||||
|
||||
_, err = NewBuildVersion("build")
|
||||
if err != nil {
|
||||
t.Errorf("Unexpected error %q", err)
|
||||
}
|
||||
|
||||
_, err = NewBuildVersion("test?")
|
||||
if err == nil {
|
||||
t.Error("Expected error, got none")
|
||||
}
|
||||
|
||||
_, err = NewBuildVersion("")
|
||||
if err == nil {
|
||||
t.Error("Expected error, got none")
|
||||
}
|
||||
}
|
||||
|
||||
func TestNewHelper(t *testing.T) {
|
||||
v, err := New("1.2.3")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error %q", err)
|
||||
}
|
||||
|
||||
// New returns pointer
|
||||
if v == nil {
|
||||
t.Fatal("Version is nil")
|
||||
}
|
||||
if v.Compare(Version{1, 2, 3, nil, nil}) != 0 {
|
||||
t.Fatal("Unexpected comparison problem")
|
||||
}
|
||||
}
|
||||
|
||||
func TestMakeHelper(t *testing.T) {
|
||||
v, err := Make("1.2.3")
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error %q", err)
|
||||
}
|
||||
if v.Compare(Version{1, 2, 3, nil, nil}) != 0 {
|
||||
t.Fatal("Unexpected comparison problem")
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkParseSimple(b *testing.B) {
|
||||
const VERSION = "0.0.1"
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for n := 0; n < b.N; n++ {
|
||||
Parse(VERSION)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkParseComplex(b *testing.B) {
|
||||
const VERSION = "0.0.1-alpha.preview+123.456"
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for n := 0; n < b.N; n++ {
|
||||
Parse(VERSION)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkParseAverage(b *testing.B) {
|
||||
l := len(formatTests)
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for n := 0; n < b.N; n++ {
|
||||
Parse(formatTests[n%l].result)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkStringSimple(b *testing.B) {
|
||||
const VERSION = "0.0.1"
|
||||
v, _ := Parse(VERSION)
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for n := 0; n < b.N; n++ {
|
||||
v.String()
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkStringLarger(b *testing.B) {
|
||||
const VERSION = "11.15.2012"
|
||||
v, _ := Parse(VERSION)
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for n := 0; n < b.N; n++ {
|
||||
v.String()
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkStringComplex(b *testing.B) {
|
||||
const VERSION = "0.0.1-alpha.preview+123.456"
|
||||
v, _ := Parse(VERSION)
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for n := 0; n < b.N; n++ {
|
||||
v.String()
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkStringAverage(b *testing.B) {
|
||||
l := len(formatTests)
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for n := 0; n < b.N; n++ {
|
||||
formatTests[n%l].v.String()
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkValidateSimple(b *testing.B) {
|
||||
const VERSION = "0.0.1"
|
||||
v, _ := Parse(VERSION)
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for n := 0; n < b.N; n++ {
|
||||
v.Validate()
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkValidateComplex(b *testing.B) {
|
||||
const VERSION = "0.0.1-alpha.preview+123.456"
|
||||
v, _ := Parse(VERSION)
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for n := 0; n < b.N; n++ {
|
||||
v.Validate()
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkValidateAverage(b *testing.B) {
|
||||
l := len(formatTests)
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for n := 0; n < b.N; n++ {
|
||||
formatTests[n%l].v.Validate()
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkCompareSimple(b *testing.B) {
|
||||
const VERSION = "0.0.1"
|
||||
v, _ := Parse(VERSION)
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for n := 0; n < b.N; n++ {
|
||||
v.Compare(v)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkCompareComplex(b *testing.B) {
|
||||
const VERSION = "0.0.1-alpha.preview+123.456"
|
||||
v, _ := Parse(VERSION)
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for n := 0; n < b.N; n++ {
|
||||
v.Compare(v)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkCompareAverage(b *testing.B) {
|
||||
l := len(compareTests)
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for n := 0; n < b.N; n++ {
|
||||
compareTests[n%l].v1.Compare((compareTests[n%l].v2))
|
||||
}
|
||||
}
|
||||
30  Godeps/_workspace/src/github.com/blang/semver/sort_test.go (generated, vendored)
@@ -1,30 +0,0 @@
package semver

import (
	"reflect"
	"testing"
)

func TestSort(t *testing.T) {
	v100, _ := Parse("1.0.0")
	v010, _ := Parse("0.1.0")
	v001, _ := Parse("0.0.1")
	versions := []Version{v010, v100, v001}
	Sort(versions)

	correct := []Version{v001, v010, v100}
	if !reflect.DeepEqual(versions, correct) {
		t.Fatalf("Sort returned wrong order: %s", versions)
	}
}

func BenchmarkSort(b *testing.B) {
	v100, _ := Parse("1.0.0")
	v010, _ := Parse("0.1.0")
	v001, _ := Parse("0.0.1")
	b.ReportAllocs()
	b.ResetTimer()
	for n := 0; n < b.N; n++ {
		Sort([]Version{v010, v100, v001})
	}
}
38  Godeps/_workspace/src/github.com/blang/semver/sql_test.go (generated, vendored)
@@ -1,38 +0,0 @@
package semver

import (
	"testing"
)

type scanTest struct {
	val         interface{}
	shouldError bool
	expected    string
}

var scanTests = []scanTest{
	scanTest{"1.2.3", false, "1.2.3"},
	scanTest{[]byte("1.2.3"), false, "1.2.3"},
	scanTest{7, true, ""},
	scanTest{7e4, true, ""},
	scanTest{true, true, ""},
}

func TestScanString(t *testing.T) {
	for _, tc := range scanTests {
		s := &Version{}
		err := s.Scan(tc.val)
		if tc.shouldError {
			if err == nil {
				t.Fatalf("Scan did not return an error on %v (%T)", tc.val, tc.val)
			}
		} else {
			if err != nil {
				t.Fatalf("Scan returned an unexpected error: %v on %v (%T)", err, tc.val, tc.val)
			}
			if val, _ := s.Value(); val != tc.expected {
				t.Errorf("Wrong Value returned, expected %q, got %q", tc.expected, val)
			}
		}
	}
}
43  Godeps/_workspace/src/github.com/golang/protobuf/proto/Makefile (generated, vendored, new file)
@@ -0,0 +1,43 @@
# Go support for Protocol Buffers - Google's data interchange format
#
# Copyright 2010 The Go Authors. All rights reserved.
# https://github.com/golang/protobuf
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

install:
	go install

test: install generate-test-pbs
	go test


generate-test-pbs:
	make install
	make -C testdata
	protoc --go_out=Mtestdata/test.proto=github.com/golang/protobuf/proto/testdata:. proto3_proto/proto3.proto
	make
223  Godeps/_workspace/src/github.com/golang/protobuf/proto/clone.go (generated, vendored, new file)
@@ -0,0 +1,223 @@
|
||||
// Go support for Protocol Buffers - Google's data interchange format
|
||||
//
|
||||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// https://github.com/golang/protobuf
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
// Protocol buffer deep copy and merge.
|
||||
// TODO: MessageSet and RawMessage.
|
||||
|
||||
package proto
|
||||
|
||||
import (
|
||||
"log"
|
||||
"reflect"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Clone returns a deep copy of a protocol buffer.
|
||||
func Clone(pb Message) Message {
|
||||
in := reflect.ValueOf(pb)
|
||||
if in.IsNil() {
|
||||
return pb
|
||||
}
|
||||
|
||||
out := reflect.New(in.Type().Elem())
|
||||
// out is empty so a merge is a deep copy.
|
||||
mergeStruct(out.Elem(), in.Elem())
|
||||
return out.Interface().(Message)
|
||||
}
|
||||
|
||||
// Merge merges src into dst.
|
||||
// Required and optional fields that are set in src will be set to that value in dst.
|
||||
// Elements of repeated fields will be appended.
|
||||
// Merge panics if src and dst are not the same type, or if dst is nil.
|
||||
func Merge(dst, src Message) {
|
||||
in := reflect.ValueOf(src)
|
||||
out := reflect.ValueOf(dst)
|
||||
if out.IsNil() {
|
||||
panic("proto: nil destination")
|
||||
}
|
||||
if in.Type() != out.Type() {
|
||||
// Explicit test prior to mergeStruct so that mistyped nils will fail
|
||||
panic("proto: type mismatch")
|
||||
}
|
||||
if in.IsNil() {
|
||||
// Merging nil into non-nil is a quiet no-op
|
||||
return
|
||||
}
|
||||
mergeStruct(out.Elem(), in.Elem())
|
||||
}
|
||||
|
||||
func mergeStruct(out, in reflect.Value) {
|
||||
sprop := GetProperties(in.Type())
|
||||
for i := 0; i < in.NumField(); i++ {
|
||||
f := in.Type().Field(i)
|
||||
if strings.HasPrefix(f.Name, "XXX_") {
|
||||
continue
|
||||
}
|
||||
mergeAny(out.Field(i), in.Field(i), false, sprop.Prop[i])
|
||||
}
|
||||
|
||||
if emIn, ok := in.Addr().Interface().(extendableProto); ok {
|
||||
emOut := out.Addr().Interface().(extendableProto)
|
||||
mergeExtension(emOut.ExtensionMap(), emIn.ExtensionMap())
|
||||
}
|
||||
|
||||
uf := in.FieldByName("XXX_unrecognized")
|
||||
if !uf.IsValid() {
|
||||
return
|
||||
}
|
||||
uin := uf.Bytes()
|
||||
if len(uin) > 0 {
|
||||
out.FieldByName("XXX_unrecognized").SetBytes(append([]byte(nil), uin...))
|
||||
}
|
||||
}
|
||||
|
||||
// mergeAny performs a merge between two values of the same type.
|
||||
// viaPtr indicates whether the values were indirected through a pointer (implying proto2).
|
||||
// prop is set if this is a struct field (it may be nil).
|
||||
func mergeAny(out, in reflect.Value, viaPtr bool, prop *Properties) {
|
||||
if in.Type() == protoMessageType {
|
||||
if !in.IsNil() {
|
||||
if out.IsNil() {
|
||||
out.Set(reflect.ValueOf(Clone(in.Interface().(Message))))
|
||||
} else {
|
||||
Merge(out.Interface().(Message), in.Interface().(Message))
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
switch in.Kind() {
|
||||
case reflect.Bool, reflect.Float32, reflect.Float64, reflect.Int32, reflect.Int64,
|
||||
reflect.String, reflect.Uint32, reflect.Uint64:
|
||||
if !viaPtr && isProto3Zero(in) {
|
||||
return
|
||||
}
|
||||
out.Set(in)
|
||||
case reflect.Interface:
|
||||
// Probably a oneof field; copy non-nil values.
|
||||
if in.IsNil() {
|
||||
return
|
||||
}
|
||||
// Allocate destination if it is not set, or set to a different type.
|
||||
// Otherwise we will merge as normal.
|
||||
if out.IsNil() || out.Elem().Type() != in.Elem().Type() {
|
||||
out.Set(reflect.New(in.Elem().Elem().Type())) // interface -> *T -> T -> new(T)
|
||||
}
|
||||
mergeAny(out.Elem(), in.Elem(), false, nil)
|
||||
case reflect.Map:
|
||||
if in.Len() == 0 {
|
||||
return
|
||||
}
|
||||
if out.IsNil() {
|
||||
out.Set(reflect.MakeMap(in.Type()))
|
||||
}
|
||||
// For maps with value types of *T or []byte we need to deep copy each value.
|
||||
elemKind := in.Type().Elem().Kind()
|
||||
for _, key := range in.MapKeys() {
|
||||
var val reflect.Value
|
||||
switch elemKind {
|
||||
case reflect.Ptr:
|
||||
val = reflect.New(in.Type().Elem().Elem())
|
||||
mergeAny(val, in.MapIndex(key), false, nil)
|
||||
case reflect.Slice:
|
||||
val = in.MapIndex(key)
|
||||
val = reflect.ValueOf(append([]byte{}, val.Bytes()...))
|
||||
default:
|
||||
val = in.MapIndex(key)
|
||||
}
|
||||
out.SetMapIndex(key, val)
|
||||
}
|
||||
case reflect.Ptr:
|
||||
if in.IsNil() {
|
||||
return
|
||||
}
|
||||
if out.IsNil() {
|
||||
out.Set(reflect.New(in.Elem().Type()))
|
||||
}
|
||||
mergeAny(out.Elem(), in.Elem(), true, nil)
|
||||
case reflect.Slice:
|
||||
if in.IsNil() {
|
||||
return
|
||||
}
|
||||
if in.Type().Elem().Kind() == reflect.Uint8 {
|
||||
// []byte is a scalar bytes field, not a repeated field.
|
||||
|
||||
// Edge case: if this is in a proto3 message, a zero length
|
||||
// bytes field is considered the zero value, and should not
|
||||
// be merged.
|
||||
if prop != nil && prop.proto3 && in.Len() == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
// Make a deep copy.
|
||||
// Append to []byte{} instead of []byte(nil) so that we never end up
|
||||
// with a nil result.
|
||||
out.SetBytes(append([]byte{}, in.Bytes()...))
|
||||
return
|
||||
}
|
||||
n := in.Len()
|
||||
if out.IsNil() {
|
||||
out.Set(reflect.MakeSlice(in.Type(), 0, n))
|
||||
}
|
||||
switch in.Type().Elem().Kind() {
|
||||
case reflect.Bool, reflect.Float32, reflect.Float64, reflect.Int32, reflect.Int64,
|
||||
reflect.String, reflect.Uint32, reflect.Uint64:
|
||||
out.Set(reflect.AppendSlice(out, in))
|
||||
default:
|
||||
for i := 0; i < n; i++ {
|
||||
x := reflect.Indirect(reflect.New(in.Type().Elem()))
|
||||
mergeAny(x, in.Index(i), false, nil)
|
||||
out.Set(reflect.Append(out, x))
|
||||
}
|
||||
}
|
||||
case reflect.Struct:
|
||||
mergeStruct(out, in)
|
||||
default:
|
||||
// unknown type, so not a protocol buffer
|
||||
log.Printf("proto: don't know how to copy %v", in)
|
||||
}
|
||||
}
|
||||
|
||||
func mergeExtension(out, in map[int32]Extension) {
|
||||
for extNum, eIn := range in {
|
||||
eOut := Extension{desc: eIn.desc}
|
||||
if eIn.value != nil {
|
||||
v := reflect.New(reflect.TypeOf(eIn.value)).Elem()
|
||||
mergeAny(v, reflect.ValueOf(eIn.value), false, nil)
|
||||
eOut.value = v.Interface()
|
||||
}
|
||||
if eIn.enc != nil {
|
||||
eOut.enc = make([]byte, len(eIn.enc))
|
||||
copy(eOut.enc, eIn.enc)
|
||||
}
|
||||
|
||||
out[extNum] = eOut
|
||||
}
|
||||
}
|
||||
867  Godeps/_workspace/src/github.com/golang/protobuf/proto/decode.go (generated, vendored, new file)
@@ -0,0 +1,867 @@
|
||||
// Go support for Protocol Buffers - Google's data interchange format
|
||||
//
|
||||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// https://github.com/golang/protobuf
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
package proto
|
||||
|
||||
/*
|
||||
* Routines for decoding protocol buffer data to construct in-memory representations.
|
||||
*/
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"reflect"
|
||||
)
|
||||
|
||||
// errOverflow is returned when an integer is too large to be represented.
|
||||
var errOverflow = errors.New("proto: integer overflow")
|
||||
|
||||
// ErrInternalBadWireType is returned by generated code when an incorrect
|
||||
// wire type is encountered. It does not get returned to user code.
|
||||
var ErrInternalBadWireType = errors.New("proto: internal error: bad wiretype for oneof")
|
||||
|
||||
// The fundamental decoders that interpret bytes on the wire.
|
||||
// Those that take integer types all return uint64 and are
|
||||
// therefore of type valueDecoder.
|
||||
|
||||
// DecodeVarint reads a varint-encoded integer from the slice.
|
||||
// It returns the integer and the number of bytes consumed, or
|
||||
// zero if there is not enough.
|
||||
// This is the format for the
|
||||
// int32, int64, uint32, uint64, bool, and enum
|
||||
// protocol buffer types.
|
||||
func DecodeVarint(buf []byte) (x uint64, n int) {
|
||||
// x, n already 0
|
||||
for shift := uint(0); shift < 64; shift += 7 {
|
||||
if n >= len(buf) {
|
||||
return 0, 0
|
||||
}
|
||||
b := uint64(buf[n])
|
||||
n++
|
||||
x |= (b & 0x7F) << shift
|
||||
if (b & 0x80) == 0 {
|
||||
return x, n
|
||||
}
|
||||
}
|
||||
|
||||
// The number is too large to represent in a 64-bit value.
|
||||
return 0, 0
|
||||
}
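A minimal sketch (not part of the vendored source, assuming the package is importable as github.com/golang/protobuf/proto) of the varint layout described above: each byte carries seven low-order bits, least significant group first, so 0xAC 0x02 decodes to 300.

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
)

func main() {
	// 0xAC -> low 7 bits 0x2C (44); 0x02 -> 2 shifted left by 7 (256).
	// 44 + 256 = 300, consuming 2 bytes.
	x, n := proto.DecodeVarint([]byte{0xAC, 0x02})
	fmt.Println(x, n) // 300 2
}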
|
||||
|
||||
// DecodeVarint reads a varint-encoded integer from the Buffer.
|
||||
// This is the format for the
|
||||
// int32, int64, uint32, uint64, bool, and enum
|
||||
// protocol buffer types.
|
||||
func (p *Buffer) DecodeVarint() (x uint64, err error) {
|
||||
// x, err already 0
|
||||
|
||||
i := p.index
|
||||
l := len(p.buf)
|
||||
|
||||
for shift := uint(0); shift < 64; shift += 7 {
|
||||
if i >= l {
|
||||
err = io.ErrUnexpectedEOF
|
||||
return
|
||||
}
|
||||
b := p.buf[i]
|
||||
i++
|
||||
x |= (uint64(b) & 0x7F) << shift
|
||||
if b < 0x80 {
|
||||
p.index = i
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// The number is too large to represent in a 64-bit value.
|
||||
err = errOverflow
|
||||
return
|
||||
}
|
||||
|
||||
// DecodeFixed64 reads a 64-bit integer from the Buffer.
|
||||
// This is the format for the
|
||||
// fixed64, sfixed64, and double protocol buffer types.
|
||||
func (p *Buffer) DecodeFixed64() (x uint64, err error) {
|
||||
// x, err already 0
|
||||
i := p.index + 8
|
||||
if i < 0 || i > len(p.buf) {
|
||||
err = io.ErrUnexpectedEOF
|
||||
return
|
||||
}
|
||||
p.index = i
|
||||
|
||||
x = uint64(p.buf[i-8])
|
||||
x |= uint64(p.buf[i-7]) << 8
|
||||
x |= uint64(p.buf[i-6]) << 16
|
||||
x |= uint64(p.buf[i-5]) << 24
|
||||
x |= uint64(p.buf[i-4]) << 32
|
||||
x |= uint64(p.buf[i-3]) << 40
|
||||
x |= uint64(p.buf[i-2]) << 48
|
||||
x |= uint64(p.buf[i-1]) << 56
|
||||
return
|
||||
}
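A small sketch of the fixed64 layout (same import-path assumption as above): the eight bytes are read little-endian, so a buffer whose first byte is 1 decodes to 1.

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
)

func main() {
	// Eight little-endian bytes; only the lowest byte is set here.
	b := proto.NewBuffer([]byte{1, 0, 0, 0, 0, 0, 0, 0})
	x, err := b.DecodeFixed64()
	fmt.Println(x, err) // 1 <nil>
}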
|
||||
|
||||
// DecodeFixed32 reads a 32-bit integer from the Buffer.
|
||||
// This is the format for the
|
||||
// fixed32, sfixed32, and float protocol buffer types.
|
||||
func (p *Buffer) DecodeFixed32() (x uint64, err error) {
|
||||
// x, err already 0
|
||||
i := p.index + 4
|
||||
if i < 0 || i > len(p.buf) {
|
||||
err = io.ErrUnexpectedEOF
|
||||
return
|
||||
}
|
||||
p.index = i
|
||||
|
||||
x = uint64(p.buf[i-4])
|
||||
x |= uint64(p.buf[i-3]) << 8
|
||||
x |= uint64(p.buf[i-2]) << 16
|
||||
x |= uint64(p.buf[i-1]) << 24
|
||||
return
|
||||
}
|
||||
|
||||
// DecodeZigzag64 reads a zigzag-encoded 64-bit integer
|
||||
// from the Buffer.
|
||||
// This is the format used for the sint64 protocol buffer type.
|
||||
func (p *Buffer) DecodeZigzag64() (x uint64, err error) {
|
||||
x, err = p.DecodeVarint()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
x = (x >> 1) ^ uint64((int64(x&1)<<63)>>63)
|
||||
return
|
||||
}
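The zigzag step above maps the encoded values 0, 1, 2, 3, 4, ... back to 0, -1, 1, -2, 2, ...; a standalone sketch of the same expression (plain arithmetic, no library calls):

package main

import "fmt"

// zigzag64 applies the same decode step used by DecodeZigzag64 above.
func zigzag64(x uint64) int64 {
	return int64((x >> 1) ^ uint64((int64(x&1)<<63)>>63))
}

func main() {
	for _, enc := range []uint64{0, 1, 2, 3, 4} {
		fmt.Println(enc, "->", zigzag64(enc)) // 0, -1, 1, -2, 2
	}
}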
|
||||
|
||||
// DecodeZigzag32 reads a zigzag-encoded 32-bit integer
|
||||
// from the Buffer.
|
||||
// This is the format used for the sint32 protocol buffer type.
|
||||
func (p *Buffer) DecodeZigzag32() (x uint64, err error) {
|
||||
x, err = p.DecodeVarint()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
x = uint64((uint32(x) >> 1) ^ uint32((int32(x&1)<<31)>>31))
|
||||
return
|
||||
}
|
||||
|
||||
// These are not ValueDecoders: they produce an array of bytes or a string.
|
||||
// bytes, embedded messages
|
||||
|
||||
// DecodeRawBytes reads a count-delimited byte buffer from the Buffer.
|
||||
// This is the format used for the bytes protocol buffer
|
||||
// type and for embedded messages.
|
||||
func (p *Buffer) DecodeRawBytes(alloc bool) (buf []byte, err error) {
|
||||
n, err := p.DecodeVarint()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
nb := int(n)
|
||||
if nb < 0 {
|
||||
return nil, fmt.Errorf("proto: bad byte length %d", nb)
|
||||
}
|
||||
end := p.index + nb
|
||||
if end < p.index || end > len(p.buf) {
|
||||
return nil, io.ErrUnexpectedEOF
|
||||
}
|
||||
|
||||
if !alloc {
|
||||
// todo: check if can get more uses of alloc=false
|
||||
buf = p.buf[p.index:end]
|
||||
p.index += nb
|
||||
return
|
||||
}
|
||||
|
||||
buf = make([]byte, nb)
|
||||
copy(buf, p.buf[p.index:])
|
||||
p.index += nb
|
||||
return
|
||||
}
|
||||
|
||||
// DecodeStringBytes reads an encoded string from the Buffer.
|
||||
// This is the format used for the proto2 string type.
|
||||
func (p *Buffer) DecodeStringBytes() (s string, err error) {
|
||||
buf, err := p.DecodeRawBytes(false)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
return string(buf), nil
|
||||
}
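A sketch of the count-delimited form these two functions consume (vendored import path assumed): a varint length followed by that many payload bytes.

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
)

func main() {
	// Varint length 3, then the payload "abc".
	b := proto.NewBuffer([]byte{0x03, 'a', 'b', 'c'})
	s, err := b.DecodeStringBytes()
	fmt.Println(s, err) // abc <nil>
}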
|
||||
|
||||
// Skip the next item in the buffer. Its wire type is decoded and presented as an argument.
|
||||
// If the protocol buffer has extensions, and the field matches, add it as an extension.
|
||||
// Otherwise, if the XXX_unrecognized field exists, append the skipped data there.
|
||||
func (o *Buffer) skipAndSave(t reflect.Type, tag, wire int, base structPointer, unrecField field) error {
|
||||
oi := o.index
|
||||
|
||||
err := o.skip(t, tag, wire)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if !unrecField.IsValid() {
|
||||
return nil
|
||||
}
|
||||
|
||||
ptr := structPointer_Bytes(base, unrecField)
|
||||
|
||||
// Add the skipped field to struct field
|
||||
obuf := o.buf
|
||||
|
||||
o.buf = *ptr
|
||||
o.EncodeVarint(uint64(tag<<3 | wire))
|
||||
*ptr = append(o.buf, obuf[oi:o.index]...)
|
||||
|
||||
o.buf = obuf
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Skip the next item in the buffer. Its wire type is decoded and presented as an argument.
|
||||
func (o *Buffer) skip(t reflect.Type, tag, wire int) error {
|
||||
|
||||
var u uint64
|
||||
var err error
|
||||
|
||||
switch wire {
|
||||
case WireVarint:
|
||||
_, err = o.DecodeVarint()
|
||||
case WireFixed64:
|
||||
_, err = o.DecodeFixed64()
|
||||
case WireBytes:
|
||||
_, err = o.DecodeRawBytes(false)
|
||||
case WireFixed32:
|
||||
_, err = o.DecodeFixed32()
|
||||
case WireStartGroup:
|
||||
for {
|
||||
u, err = o.DecodeVarint()
|
||||
if err != nil {
|
||||
break
|
||||
}
|
||||
fwire := int(u & 0x7)
|
||||
if fwire == WireEndGroup {
|
||||
break
|
||||
}
|
||||
ftag := int(u >> 3)
|
||||
err = o.skip(t, ftag, fwire)
|
||||
if err != nil {
|
||||
break
|
||||
}
|
||||
}
|
||||
default:
|
||||
err = fmt.Errorf("proto: can't skip unknown wire type %d for %s", wire, t)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// Unmarshaler is the interface representing objects that can
|
||||
// unmarshal themselves. The method should reset the receiver before
|
||||
// decoding starts. The argument points to data that may be
|
||||
// overwritten, so implementations should not keep references to the
|
||||
// buffer.
|
||||
type Unmarshaler interface {
|
||||
Unmarshal([]byte) error
|
||||
}
|
||||
|
||||
// Unmarshal parses the protocol buffer representation in buf and places the
|
||||
// decoded result in pb. If the struct underlying pb does not match
|
||||
// the data in buf, the results can be unpredictable.
|
||||
//
|
||||
// Unmarshal resets pb before starting to unmarshal, so any
|
||||
// existing data in pb is always removed. Use UnmarshalMerge
|
||||
// to preserve and append to existing data.
|
||||
func Unmarshal(buf []byte, pb Message) error {
|
||||
pb.Reset()
|
||||
return UnmarshalMerge(buf, pb)
|
||||
}
|
||||
|
||||
// UnmarshalMerge parses the protocol buffer representation in buf and
|
||||
// writes the decoded result to pb. If the struct underlying pb does not match
|
||||
// the data in buf, the results can be unpredictable.
|
||||
//
|
||||
// UnmarshalMerge merges into existing data in pb.
|
||||
// Most code should use Unmarshal instead.
|
||||
func UnmarshalMerge(buf []byte, pb Message) error {
|
||||
// If the object can unmarshal itself, let it.
|
||||
if u, ok := pb.(Unmarshaler); ok {
|
||||
return u.Unmarshal(buf)
|
||||
}
|
||||
return NewBuffer(buf).Unmarshal(pb)
|
||||
}
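A sketch of the Unmarshaler fast path (the rawBlob type below is illustrative, not part of this package): because it implements Unmarshal([]byte) error, Unmarshal hands it the raw bytes instead of running the reflection-based decoder.

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
)

// rawBlob is a hand-written stand-in that implements both Message and
// Unmarshaler, so proto.Unmarshal delegates to its Unmarshal method.
type rawBlob struct{ data []byte }

func (r *rawBlob) Reset()         { r.data = nil }
func (r *rawBlob) String() string { return fmt.Sprintf("rawBlob(%d bytes)", len(r.data)) }
func (r *rawBlob) ProtoMessage()  {}

// Unmarshal copies buf because the buffer may be overwritten by the caller.
func (r *rawBlob) Unmarshal(buf []byte) error {
	r.data = append(r.data[:0], buf...)
	return nil
}

func main() {
	var m rawBlob
	err := proto.Unmarshal([]byte{0x0A, 0x03, 'a', 'b', 'c'}, &m)
	fmt.Println(m.String(), err) // rawBlob(5 bytes) <nil>
}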
|
||||
|
||||
// DecodeMessage reads a count-delimited message from the Buffer.
|
||||
func (p *Buffer) DecodeMessage(pb Message) error {
|
||||
enc, err := p.DecodeRawBytes(false)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return NewBuffer(enc).Unmarshal(pb)
|
||||
}
|
||||
|
||||
// DecodeGroup reads a tag-delimited group from the Buffer.
|
||||
func (p *Buffer) DecodeGroup(pb Message) error {
|
||||
typ, base, err := getbase(pb)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return p.unmarshalType(typ.Elem(), GetProperties(typ.Elem()), true, base)
|
||||
}
|
||||
|
||||
// Unmarshal parses the protocol buffer representation in the
|
||||
// Buffer and places the decoded result in pb. If the struct
|
||||
// underlying pb does not match the data in the buffer, the results can be
|
||||
// unpredictable.
|
||||
func (p *Buffer) Unmarshal(pb Message) error {
|
||||
// If the object can unmarshal itself, let it.
|
||||
if u, ok := pb.(Unmarshaler); ok {
|
||||
err := u.Unmarshal(p.buf[p.index:])
|
||||
p.index = len(p.buf)
|
||||
return err
|
||||
}
|
||||
|
||||
typ, base, err := getbase(pb)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = p.unmarshalType(typ.Elem(), GetProperties(typ.Elem()), false, base)
|
||||
|
||||
if collectStats {
|
||||
stats.Decode++
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
// unmarshalType does the work of unmarshaling a structure.
|
||||
func (o *Buffer) unmarshalType(st reflect.Type, prop *StructProperties, is_group bool, base structPointer) error {
|
||||
var state errorState
|
||||
required, reqFields := prop.reqCount, uint64(0)
|
||||
|
||||
var err error
|
||||
for err == nil && o.index < len(o.buf) {
|
||||
oi := o.index
|
||||
var u uint64
|
||||
u, err = o.DecodeVarint()
|
||||
if err != nil {
|
||||
break
|
||||
}
|
||||
wire := int(u & 0x7)
|
||||
if wire == WireEndGroup {
|
||||
if is_group {
|
||||
return nil // input is satisfied
|
||||
}
|
||||
return fmt.Errorf("proto: %s: wiretype end group for non-group", st)
|
||||
}
|
||||
tag := int(u >> 3)
|
||||
if tag <= 0 {
|
||||
return fmt.Errorf("proto: %s: illegal tag %d (wire type %d)", st, tag, wire)
|
||||
}
|
||||
fieldnum, ok := prop.decoderTags.get(tag)
|
||||
if !ok {
|
||||
// Maybe it's an extension?
|
||||
if prop.extendable {
|
||||
if e := structPointer_Interface(base, st).(extendableProto); isExtensionField(e, int32(tag)) {
|
||||
if err = o.skip(st, tag, wire); err == nil {
|
||||
ext := e.ExtensionMap()[int32(tag)] // may be missing
|
||||
ext.enc = append(ext.enc, o.buf[oi:o.index]...)
|
||||
e.ExtensionMap()[int32(tag)] = ext
|
||||
}
|
||||
continue
|
||||
}
|
||||
}
|
||||
// Maybe it's a oneof?
|
||||
if prop.oneofUnmarshaler != nil {
|
||||
m := structPointer_Interface(base, st).(Message)
|
||||
// First return value indicates whether tag is a oneof field.
|
||||
ok, err = prop.oneofUnmarshaler(m, tag, wire, o)
|
||||
if err == ErrInternalBadWireType {
|
||||
// Map the error to something more descriptive.
|
||||
// Do the formatting here to save generated code space.
|
||||
err = fmt.Errorf("bad wiretype for oneof field in %T", m)
|
||||
}
|
||||
if ok {
|
||||
continue
|
||||
}
|
||||
}
|
||||
err = o.skipAndSave(st, tag, wire, base, prop.unrecField)
|
||||
continue
|
||||
}
|
||||
p := prop.Prop[fieldnum]
|
||||
|
||||
if p.dec == nil {
|
||||
fmt.Fprintf(os.Stderr, "proto: no protobuf decoder for %s.%s\n", st, st.Field(fieldnum).Name)
|
||||
continue
|
||||
}
|
||||
dec := p.dec
|
||||
if wire != WireStartGroup && wire != p.WireType {
|
||||
if wire == WireBytes && p.packedDec != nil {
|
||||
// a packable field
|
||||
dec = p.packedDec
|
||||
} else {
|
||||
err = fmt.Errorf("proto: bad wiretype for field %s.%s: got wiretype %d, want %d", st, st.Field(fieldnum).Name, wire, p.WireType)
|
||||
continue
|
||||
}
|
||||
}
|
||||
decErr := dec(o, p, base)
|
||||
if decErr != nil && !state.shouldContinue(decErr, p) {
|
||||
err = decErr
|
||||
}
|
||||
if err == nil && p.Required {
|
||||
// Successfully decoded a required field.
|
||||
if tag <= 64 {
|
||||
// use bitmap for fields 1-64 to catch field reuse.
|
||||
var mask uint64 = 1 << uint64(tag-1)
|
||||
if reqFields&mask == 0 {
|
||||
// new required field
|
||||
reqFields |= mask
|
||||
required--
|
||||
}
|
||||
} else {
|
||||
// This is imprecise. It can be fooled by a required field
|
||||
// with a tag > 64 that is encoded twice; that's very rare.
|
||||
// A fully correct implementation would require allocating
|
||||
// a data structure, which we would like to avoid.
|
||||
required--
|
||||
}
|
||||
}
|
||||
}
|
||||
if err == nil {
|
||||
if is_group {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
if state.err != nil {
|
||||
return state.err
|
||||
}
|
||||
if required > 0 {
|
||||
// Not enough information to determine the exact field. If we use extra
|
||||
// CPU, we could determine the field only if the missing required field
|
||||
// has a tag <= 64 and we check reqFields.
|
||||
return &RequiredNotSetError{"{Unknown}"}
|
||||
}
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// Individual type decoders
|
||||
// For each,
|
||||
// u is the decoded value,
|
||||
// v is a pointer to the field (pointer) in the struct
|
||||
|
||||
// Sizes of the pools to allocate inside the Buffer.
|
||||
// The goal is modest amortization and allocation
|
||||
// on at least 16-byte boundaries.
|
||||
const (
|
||||
boolPoolSize = 16
|
||||
uint32PoolSize = 8
|
||||
uint64PoolSize = 4
|
||||
)
|
||||
|
||||
// Decode a bool.
|
||||
func (o *Buffer) dec_bool(p *Properties, base structPointer) error {
|
||||
u, err := p.valDec(o)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if len(o.bools) == 0 {
|
||||
o.bools = make([]bool, boolPoolSize)
|
||||
}
|
||||
o.bools[0] = u != 0
|
||||
*structPointer_Bool(base, p.field) = &o.bools[0]
|
||||
o.bools = o.bools[1:]
|
||||
return nil
|
||||
}
|
||||
|
||||
func (o *Buffer) dec_proto3_bool(p *Properties, base structPointer) error {
|
||||
u, err := p.valDec(o)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
*structPointer_BoolVal(base, p.field) = u != 0
|
||||
return nil
|
||||
}
|
||||
|
||||
// Decode an int32.
|
||||
func (o *Buffer) dec_int32(p *Properties, base structPointer) error {
|
||||
u, err := p.valDec(o)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
word32_Set(structPointer_Word32(base, p.field), o, uint32(u))
|
||||
return nil
|
||||
}
|
||||
|
||||
func (o *Buffer) dec_proto3_int32(p *Properties, base structPointer) error {
|
||||
u, err := p.valDec(o)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
word32Val_Set(structPointer_Word32Val(base, p.field), uint32(u))
|
||||
return nil
|
||||
}
|
||||
|
||||
// Decode an int64.
|
||||
func (o *Buffer) dec_int64(p *Properties, base structPointer) error {
|
||||
u, err := p.valDec(o)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
word64_Set(structPointer_Word64(base, p.field), o, u)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (o *Buffer) dec_proto3_int64(p *Properties, base structPointer) error {
|
||||
u, err := p.valDec(o)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
word64Val_Set(structPointer_Word64Val(base, p.field), o, u)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Decode a string.
|
||||
func (o *Buffer) dec_string(p *Properties, base structPointer) error {
|
||||
s, err := o.DecodeStringBytes()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
*structPointer_String(base, p.field) = &s
|
||||
return nil
|
||||
}
|
||||
|
||||
func (o *Buffer) dec_proto3_string(p *Properties, base structPointer) error {
|
||||
s, err := o.DecodeStringBytes()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
*structPointer_StringVal(base, p.field) = s
|
||||
return nil
|
||||
}
|
||||
|
||||
// Decode a slice of bytes ([]byte).
|
||||
func (o *Buffer) dec_slice_byte(p *Properties, base structPointer) error {
|
||||
b, err := o.DecodeRawBytes(true)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
*structPointer_Bytes(base, p.field) = b
|
||||
return nil
|
||||
}
|
||||
|
||||
// Decode a slice of bools ([]bool).
|
||||
func (o *Buffer) dec_slice_bool(p *Properties, base structPointer) error {
|
||||
u, err := p.valDec(o)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v := structPointer_BoolSlice(base, p.field)
|
||||
*v = append(*v, u != 0)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Decode a slice of bools ([]bool) in packed format.
|
||||
func (o *Buffer) dec_slice_packed_bool(p *Properties, base structPointer) error {
|
||||
v := structPointer_BoolSlice(base, p.field)
|
||||
|
||||
nn, err := o.DecodeVarint()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
nb := int(nn) // number of bytes of encoded bools
|
||||
fin := o.index + nb
|
||||
if fin < o.index {
|
||||
return errOverflow
|
||||
}
|
||||
|
||||
y := *v
|
||||
for o.index < fin {
|
||||
u, err := p.valDec(o)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
y = append(y, u != 0)
|
||||
}
|
||||
|
||||
*v = y
|
||||
return nil
|
||||
}
|
||||
|
||||
// Decode a slice of int32s ([]int32).
|
||||
func (o *Buffer) dec_slice_int32(p *Properties, base structPointer) error {
|
||||
u, err := p.valDec(o)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
structPointer_Word32Slice(base, p.field).Append(uint32(u))
|
||||
return nil
|
||||
}
|
||||
|
||||
// Decode a slice of int32s ([]int32) in packed format.
|
||||
func (o *Buffer) dec_slice_packed_int32(p *Properties, base structPointer) error {
|
||||
v := structPointer_Word32Slice(base, p.field)
|
||||
|
||||
nn, err := o.DecodeVarint()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
nb := int(nn) // number of bytes of encoded int32s
|
||||
|
||||
fin := o.index + nb
|
||||
if fin < o.index {
|
||||
return errOverflow
|
||||
}
|
||||
for o.index < fin {
|
||||
u, err := p.valDec(o)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.Append(uint32(u))
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Decode a slice of int64s ([]int64).
|
||||
func (o *Buffer) dec_slice_int64(p *Properties, base structPointer) error {
|
||||
u, err := p.valDec(o)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
structPointer_Word64Slice(base, p.field).Append(u)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Decode a slice of int64s ([]int64) in packed format.
|
||||
func (o *Buffer) dec_slice_packed_int64(p *Properties, base structPointer) error {
|
||||
v := structPointer_Word64Slice(base, p.field)
|
||||
|
||||
nn, err := o.DecodeVarint()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
nb := int(nn) // number of bytes of encoded int64s
|
||||
|
||||
fin := o.index + nb
|
||||
if fin < o.index {
|
||||
return errOverflow
|
||||
}
|
||||
for o.index < fin {
|
||||
u, err := p.valDec(o)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.Append(u)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Decode a slice of strings ([]string).
|
||||
func (o *Buffer) dec_slice_string(p *Properties, base structPointer) error {
|
||||
s, err := o.DecodeStringBytes()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v := structPointer_StringSlice(base, p.field)
|
||||
*v = append(*v, s)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Decode a slice of slice of bytes ([][]byte).
|
||||
func (o *Buffer) dec_slice_slice_byte(p *Properties, base structPointer) error {
|
||||
b, err := o.DecodeRawBytes(true)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v := structPointer_BytesSlice(base, p.field)
|
||||
*v = append(*v, b)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Decode a map field.
|
||||
func (o *Buffer) dec_new_map(p *Properties, base structPointer) error {
|
||||
raw, err := o.DecodeRawBytes(false)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
oi := o.index // index at the end of this map entry
|
||||
o.index -= len(raw) // move buffer back to start of map entry
|
||||
|
||||
mptr := structPointer_NewAt(base, p.field, p.mtype) // *map[K]V
|
||||
if mptr.Elem().IsNil() {
|
||||
mptr.Elem().Set(reflect.MakeMap(mptr.Type().Elem()))
|
||||
}
|
||||
v := mptr.Elem() // map[K]V
|
||||
|
||||
// Prepare addressable doubly-indirect placeholders for the key and value types.
|
||||
// See enc_new_map for why.
|
||||
keyptr := reflect.New(reflect.PtrTo(p.mtype.Key())).Elem() // addressable *K
|
||||
keybase := toStructPointer(keyptr.Addr()) // **K
|
||||
|
||||
var valbase structPointer
|
||||
var valptr reflect.Value
|
||||
switch p.mtype.Elem().Kind() {
|
||||
case reflect.Slice:
|
||||
// []byte
|
||||
var dummy []byte
|
||||
valptr = reflect.ValueOf(&dummy) // *[]byte
|
||||
valbase = toStructPointer(valptr) // *[]byte
|
||||
case reflect.Ptr:
|
||||
// message; valptr is **Msg; need to allocate the intermediate pointer
|
||||
valptr = reflect.New(reflect.PtrTo(p.mtype.Elem())).Elem() // addressable *V
|
||||
valptr.Set(reflect.New(valptr.Type().Elem()))
|
||||
valbase = toStructPointer(valptr)
|
||||
default:
|
||||
// everything else
|
||||
valptr = reflect.New(reflect.PtrTo(p.mtype.Elem())).Elem() // addressable *V
|
||||
valbase = toStructPointer(valptr.Addr()) // **V
|
||||
}
|
||||
|
||||
// Decode.
|
||||
// This parses a restricted wire format, namely the encoding of a message
|
||||
// with two fields. See enc_new_map for the format.
|
||||
for o.index < oi {
|
||||
// tagcode for key and value properties are always a single byte
|
||||
// because they have tags 1 and 2.
|
||||
tagcode := o.buf[o.index]
|
||||
o.index++
|
||||
switch tagcode {
|
||||
case p.mkeyprop.tagcode[0]:
|
||||
if err := p.mkeyprop.dec(o, p.mkeyprop, keybase); err != nil {
|
||||
return err
|
||||
}
|
||||
case p.mvalprop.tagcode[0]:
|
||||
if err := p.mvalprop.dec(o, p.mvalprop, valbase); err != nil {
|
||||
return err
|
||||
}
|
||||
default:
|
||||
// TODO: Should we silently skip this instead?
|
||||
return fmt.Errorf("proto: bad map data tag %d", raw[0])
|
||||
}
|
||||
}
|
||||
keyelem, valelem := keyptr.Elem(), valptr.Elem()
|
||||
if !keyelem.IsValid() || !valelem.IsValid() {
|
||||
// We did not decode the key or the value in the map entry.
|
||||
// Either way, it's an invalid map entry.
|
||||
return fmt.Errorf("proto: bad map data: missing key/val")
|
||||
}
|
||||
|
||||
v.SetMapIndex(keyelem, valelem)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Decode a group.
|
||||
func (o *Buffer) dec_struct_group(p *Properties, base structPointer) error {
|
||||
bas := structPointer_GetStructPointer(base, p.field)
|
||||
if structPointer_IsNil(bas) {
|
||||
// allocate new nested message
|
||||
bas = toStructPointer(reflect.New(p.stype))
|
||||
structPointer_SetStructPointer(base, p.field, bas)
|
||||
}
|
||||
return o.unmarshalType(p.stype, p.sprop, true, bas)
|
||||
}
|
||||
|
||||
// Decode an embedded message.
|
||||
func (o *Buffer) dec_struct_message(p *Properties, base structPointer) (err error) {
|
||||
raw, e := o.DecodeRawBytes(false)
|
||||
if e != nil {
|
||||
return e
|
||||
}
|
||||
|
||||
bas := structPointer_GetStructPointer(base, p.field)
|
||||
if structPointer_IsNil(bas) {
|
||||
// allocate new nested message
|
||||
bas = toStructPointer(reflect.New(p.stype))
|
||||
structPointer_SetStructPointer(base, p.field, bas)
|
||||
}
|
||||
|
||||
// If the object can unmarshal itself, let it.
|
||||
if p.isUnmarshaler {
|
||||
iv := structPointer_Interface(bas, p.stype)
|
||||
return iv.(Unmarshaler).Unmarshal(raw)
|
||||
}
|
||||
|
||||
obuf := o.buf
|
||||
oi := o.index
|
||||
o.buf = raw
|
||||
o.index = 0
|
||||
|
||||
err = o.unmarshalType(p.stype, p.sprop, false, bas)
|
||||
o.buf = obuf
|
||||
o.index = oi
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
// Decode a slice of embedded messages.
|
||||
func (o *Buffer) dec_slice_struct_message(p *Properties, base structPointer) error {
|
||||
return o.dec_slice_struct(p, false, base)
|
||||
}
|
||||
|
||||
// Decode a slice of embedded groups.
|
||||
func (o *Buffer) dec_slice_struct_group(p *Properties, base structPointer) error {
|
||||
return o.dec_slice_struct(p, true, base)
|
||||
}
|
||||
|
||||
// Decode a slice of structs ([]*struct).
|
||||
func (o *Buffer) dec_slice_struct(p *Properties, is_group bool, base structPointer) error {
|
||||
v := reflect.New(p.stype)
|
||||
bas := toStructPointer(v)
|
||||
structPointer_StructPointerSlice(base, p.field).Append(bas)
|
||||
|
||||
if is_group {
|
||||
err := o.unmarshalType(p.stype, p.sprop, is_group, bas)
|
||||
return err
|
||||
}
|
||||
|
||||
raw, err := o.DecodeRawBytes(false)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// If the object can unmarshal itself, let it.
|
||||
if p.isUnmarshaler {
|
||||
iv := v.Interface()
|
||||
return iv.(Unmarshaler).Unmarshal(raw)
|
||||
}
|
||||
|
||||
obuf := o.buf
|
||||
oi := o.index
|
||||
o.buf = raw
|
||||
o.index = 0
|
||||
|
||||
err = o.unmarshalType(p.stype, p.sprop, is_group, bas)
|
||||
|
||||
o.buf = obuf
|
||||
o.index = oi
|
||||
|
||||
return err
|
||||
}
1336 Godeps/_workspace/src/github.com/golang/protobuf/proto/encode.go generated vendored Normal file (file diff suppressed because it is too large)
267 Godeps/_workspace/src/github.com/golang/protobuf/proto/equal.go generated vendored Normal file
@@ -0,0 +1,267 @@
// Go support for Protocol Buffers - Google's data interchange format
|
||||
//
|
||||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// https://github.com/golang/protobuf
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
// Protocol buffer comparison.
|
||||
// TODO: MessageSet.
|
||||
|
||||
package proto
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"log"
|
||||
"reflect"
|
||||
"strings"
|
||||
)
|
||||
|
||||
/*
|
||||
Equal returns true iff protocol buffers a and b are equal.
|
||||
The arguments must both be pointers to protocol buffer structs.
|
||||
|
||||
Equality is defined in this way:
|
||||
- Two messages are equal iff they are the same type,
|
||||
corresponding fields are equal, unknown field sets
|
||||
are equal, and extensions sets are equal.
|
||||
- Two set scalar fields are equal iff their values are equal.
|
||||
If the fields are of a floating-point type, remember that
|
||||
NaN != x for all x, including NaN.
|
||||
- Two repeated fields are equal iff their lengths are the same,
|
||||
and their corresponding elements are equal (a "bytes" field,
|
||||
although represented by []byte, is not a repeated field)
|
||||
- Two unset fields are equal.
|
||||
- Two unknown field sets are equal if their current
|
||||
encoded state is equal.
|
||||
- Two extension sets are equal iff they have corresponding
|
||||
elements that are pairwise equal.
|
||||
  - Every other combination of things is not equal.
|
||||
|
||||
The return value is undefined if a and b are not protocol buffers.
|
||||
*/
|
||||
func Equal(a, b Message) bool {
|
||||
if a == nil || b == nil {
|
||||
return a == b
|
||||
}
|
||||
v1, v2 := reflect.ValueOf(a), reflect.ValueOf(b)
|
||||
if v1.Type() != v2.Type() {
|
||||
return false
|
||||
}
|
||||
if v1.Kind() == reflect.Ptr {
|
||||
if v1.IsNil() {
|
||||
return v2.IsNil()
|
||||
}
|
||||
if v2.IsNil() {
|
||||
return false
|
||||
}
|
||||
v1, v2 = v1.Elem(), v2.Elem()
|
||||
}
|
||||
if v1.Kind() != reflect.Struct {
|
||||
return false
|
||||
}
|
||||
return equalStruct(v1, v2)
|
||||
}
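A sketch of Equal on a hand-written stand-in for a generated message (the msg type below is illustrative only): scalar pointer fields and repeated fields are compared as described in the doc comment above.

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
)

// msg mimics the shape of a generated message: pointer scalar, repeated field.
type msg struct {
	Label *string
	Reps  []int64
}

func (m *msg) Reset()         { *m = msg{} }
func (m *msg) String() string { return "msg" }
func (*msg) ProtoMessage()    {}

func main() {
	a := &msg{Label: proto.String("hi"), Reps: []int64{1, 2}}
	b := &msg{Label: proto.String("hi"), Reps: []int64{1, 2}}
	fmt.Println(proto.Equal(a, b)) // true
	b.Reps = append(b.Reps, 3)
	fmt.Println(proto.Equal(a, b)) // false: repeated fields differ in length
}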
|
||||
|
||||
// v1 and v2 are known to have the same type.
|
||||
func equalStruct(v1, v2 reflect.Value) bool {
|
||||
for i := 0; i < v1.NumField(); i++ {
|
||||
f := v1.Type().Field(i)
|
||||
if strings.HasPrefix(f.Name, "XXX_") {
|
||||
continue
|
||||
}
|
||||
f1, f2 := v1.Field(i), v2.Field(i)
|
||||
if f.Type.Kind() == reflect.Ptr {
|
||||
if n1, n2 := f1.IsNil(), f2.IsNil(); n1 && n2 {
|
||||
// both unset
|
||||
continue
|
||||
} else if n1 != n2 {
|
||||
// set/unset mismatch
|
||||
return false
|
||||
}
|
||||
b1, ok := f1.Interface().(raw)
|
||||
if ok {
|
||||
b2 := f2.Interface().(raw)
|
||||
// RawMessage
|
||||
if !bytes.Equal(b1.Bytes(), b2.Bytes()) {
|
||||
return false
|
||||
}
|
||||
continue
|
||||
}
|
||||
f1, f2 = f1.Elem(), f2.Elem()
|
||||
}
|
||||
if !equalAny(f1, f2) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
if em1 := v1.FieldByName("XXX_extensions"); em1.IsValid() {
|
||||
em2 := v2.FieldByName("XXX_extensions")
|
||||
if !equalExtensions(v1.Type(), em1.Interface().(map[int32]Extension), em2.Interface().(map[int32]Extension)) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
uf := v1.FieldByName("XXX_unrecognized")
|
||||
if !uf.IsValid() {
|
||||
return true
|
||||
}
|
||||
|
||||
u1 := uf.Bytes()
|
||||
u2 := v2.FieldByName("XXX_unrecognized").Bytes()
|
||||
if !bytes.Equal(u1, u2) {
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
// v1 and v2 are known to have the same type.
|
||||
func equalAny(v1, v2 reflect.Value) bool {
|
||||
if v1.Type() == protoMessageType {
|
||||
m1, _ := v1.Interface().(Message)
|
||||
m2, _ := v2.Interface().(Message)
|
||||
return Equal(m1, m2)
|
||||
}
|
||||
switch v1.Kind() {
|
||||
case reflect.Bool:
|
||||
return v1.Bool() == v2.Bool()
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return v1.Float() == v2.Float()
|
||||
case reflect.Int32, reflect.Int64:
|
||||
return v1.Int() == v2.Int()
|
||||
case reflect.Interface:
|
||||
// Probably a oneof field; compare the inner values.
|
||||
n1, n2 := v1.IsNil(), v2.IsNil()
|
||||
if n1 || n2 {
|
||||
return n1 == n2
|
||||
}
|
||||
e1, e2 := v1.Elem(), v2.Elem()
|
||||
if e1.Type() != e2.Type() {
|
||||
return false
|
||||
}
|
||||
return equalAny(e1, e2)
|
||||
case reflect.Map:
|
||||
if v1.Len() != v2.Len() {
|
||||
return false
|
||||
}
|
||||
for _, key := range v1.MapKeys() {
|
||||
val2 := v2.MapIndex(key)
|
||||
if !val2.IsValid() {
|
||||
// This key was not found in the second map.
|
||||
return false
|
||||
}
|
||||
if !equalAny(v1.MapIndex(key), val2) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
case reflect.Ptr:
|
||||
return equalAny(v1.Elem(), v2.Elem())
|
||||
case reflect.Slice:
|
||||
if v1.Type().Elem().Kind() == reflect.Uint8 {
|
||||
// short circuit: []byte
|
||||
if v1.IsNil() != v2.IsNil() {
|
||||
return false
|
||||
}
|
||||
return bytes.Equal(v1.Interface().([]byte), v2.Interface().([]byte))
|
||||
}
|
||||
|
||||
if v1.Len() != v2.Len() {
|
||||
return false
|
||||
}
|
||||
for i := 0; i < v1.Len(); i++ {
|
||||
if !equalAny(v1.Index(i), v2.Index(i)) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
case reflect.String:
|
||||
return v1.Interface().(string) == v2.Interface().(string)
|
||||
case reflect.Struct:
|
||||
return equalStruct(v1, v2)
|
||||
case reflect.Uint32, reflect.Uint64:
|
||||
return v1.Uint() == v2.Uint()
|
||||
}
|
||||
|
||||
// unknown type, so not a protocol buffer
|
||||
log.Printf("proto: don't know how to compare %v", v1)
|
||||
return false
|
||||
}
|
||||
|
||||
// base is the struct type that the extensions are based on.
|
||||
// em1 and em2 are extension maps.
|
||||
func equalExtensions(base reflect.Type, em1, em2 map[int32]Extension) bool {
|
||||
if len(em1) != len(em2) {
|
||||
return false
|
||||
}
|
||||
|
||||
for extNum, e1 := range em1 {
|
||||
e2, ok := em2[extNum]
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
|
||||
m1, m2 := e1.value, e2.value
|
||||
|
||||
if m1 != nil && m2 != nil {
|
||||
// Both are unencoded.
|
||||
if !equalAny(reflect.ValueOf(m1), reflect.ValueOf(m2)) {
|
||||
return false
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// At least one is encoded. To do a semantically correct comparison
|
||||
// we need to unmarshal them first.
|
||||
var desc *ExtensionDesc
|
||||
if m := extensionMaps[base]; m != nil {
|
||||
desc = m[extNum]
|
||||
}
|
||||
if desc == nil {
|
||||
log.Printf("proto: don't know how to compare extension %d of %v", extNum, base)
|
||||
continue
|
||||
}
|
||||
var err error
|
||||
if m1 == nil {
|
||||
m1, err = decodeExtension(e1.enc, desc)
|
||||
}
|
||||
if m2 == nil && err == nil {
|
||||
m2, err = decodeExtension(e2.enc, desc)
|
||||
}
|
||||
if err != nil {
|
||||
// The encoded form is invalid.
|
||||
log.Printf("proto: badly encoded extension %d of %v: %v", extNum, base, err)
|
||||
return false
|
||||
}
|
||||
if !equalAny(reflect.ValueOf(m1), reflect.ValueOf(m2)) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
400 Godeps/_workspace/src/github.com/golang/protobuf/proto/extensions.go generated vendored Normal file
@@ -0,0 +1,400 @@
// Go support for Protocol Buffers - Google's data interchange format
|
||||
//
|
||||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// https://github.com/golang/protobuf
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
package proto
|
||||
|
||||
/*
|
||||
* Types and routines for supporting protocol buffer extensions.
|
||||
*/
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// ErrMissingExtension is the error returned by GetExtension if the named extension is not in the message.
|
||||
var ErrMissingExtension = errors.New("proto: missing extension")
|
||||
|
||||
// ExtensionRange represents a range of message extensions for a protocol buffer.
|
||||
// Used in code generated by the protocol compiler.
|
||||
type ExtensionRange struct {
|
||||
Start, End int32 // both inclusive
|
||||
}
|
||||
|
||||
// extendableProto is an interface implemented by any protocol buffer that may be extended.
|
||||
type extendableProto interface {
|
||||
Message
|
||||
ExtensionRangeArray() []ExtensionRange
|
||||
ExtensionMap() map[int32]Extension
|
||||
}
|
||||
|
||||
var extendableProtoType = reflect.TypeOf((*extendableProto)(nil)).Elem()
|
||||
|
||||
// ExtensionDesc represents an extension specification.
|
||||
// Used in generated code from the protocol compiler.
|
||||
type ExtensionDesc struct {
|
||||
ExtendedType Message // nil pointer to the type that is being extended
|
||||
ExtensionType interface{} // nil pointer to the extension type
|
||||
Field int32 // field number
|
||||
Name string // fully-qualified name of extension, for text formatting
|
||||
Tag string // protobuf tag style
|
||||
}
|
||||
|
||||
func (ed *ExtensionDesc) repeated() bool {
|
||||
t := reflect.TypeOf(ed.ExtensionType)
|
||||
return t.Kind() == reflect.Slice && t.Elem().Kind() != reflect.Uint8
|
||||
}
|
||||
|
||||
// Extension represents an extension in a message.
|
||||
type Extension struct {
|
||||
// When an extension is stored in a message using SetExtension
|
||||
// only desc and value are set. When the message is marshaled
|
||||
// enc will be set to the encoded form of the message.
|
||||
//
|
||||
// When a message is unmarshaled and contains extensions, each
|
||||
// extension will have only enc set. When such an extension is
|
||||
// accessed using GetExtension (or GetExtensions) desc and value
|
||||
// will be set.
|
||||
desc *ExtensionDesc
|
||||
value interface{}
|
||||
enc []byte
|
||||
}
|
||||
|
||||
// SetRawExtension is for testing only.
|
||||
func SetRawExtension(base extendableProto, id int32, b []byte) {
|
||||
base.ExtensionMap()[id] = Extension{enc: b}
|
||||
}
|
||||
|
||||
// isExtensionField returns true iff the given field number is in an extension range.
|
||||
func isExtensionField(pb extendableProto, field int32) bool {
|
||||
for _, er := range pb.ExtensionRangeArray() {
|
||||
if er.Start <= field && field <= er.End {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// checkExtensionTypes checks that the given extension is valid for pb.
|
||||
func checkExtensionTypes(pb extendableProto, extension *ExtensionDesc) error {
|
||||
// Check the extended type.
|
||||
if a, b := reflect.TypeOf(pb), reflect.TypeOf(extension.ExtendedType); a != b {
|
||||
return errors.New("proto: bad extended type; " + b.String() + " does not extend " + a.String())
|
||||
}
|
||||
// Check the range.
|
||||
if !isExtensionField(pb, extension.Field) {
|
||||
return errors.New("proto: bad extension number; not in declared ranges")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// extPropKey is sufficient to uniquely identify an extension.
|
||||
type extPropKey struct {
|
||||
base reflect.Type
|
||||
field int32
|
||||
}
|
||||
|
||||
var extProp = struct {
|
||||
sync.RWMutex
|
||||
m map[extPropKey]*Properties
|
||||
}{
|
||||
m: make(map[extPropKey]*Properties),
|
||||
}
|
||||
|
||||
func extensionProperties(ed *ExtensionDesc) *Properties {
|
||||
key := extPropKey{base: reflect.TypeOf(ed.ExtendedType), field: ed.Field}
|
||||
|
||||
extProp.RLock()
|
||||
if prop, ok := extProp.m[key]; ok {
|
||||
extProp.RUnlock()
|
||||
return prop
|
||||
}
|
||||
extProp.RUnlock()
|
||||
|
||||
extProp.Lock()
|
||||
defer extProp.Unlock()
|
||||
// Check again.
|
||||
if prop, ok := extProp.m[key]; ok {
|
||||
return prop
|
||||
}
|
||||
|
||||
prop := new(Properties)
|
||||
prop.Init(reflect.TypeOf(ed.ExtensionType), "unknown_name", ed.Tag, nil)
|
||||
extProp.m[key] = prop
|
||||
return prop
|
||||
}
|
||||
|
||||
// encodeExtensionMap encodes any unmarshaled (unencoded) extensions in m.
|
||||
func encodeExtensionMap(m map[int32]Extension) error {
|
||||
for k, e := range m {
|
||||
if e.value == nil || e.desc == nil {
|
||||
// Extension is only in its encoded form.
|
||||
continue
|
||||
}
|
||||
|
||||
// We don't skip extensions that have an encoded form set,
|
||||
// because the extension value may have been mutated after
|
||||
// the last time this function was called.
|
||||
|
||||
et := reflect.TypeOf(e.desc.ExtensionType)
|
||||
props := extensionProperties(e.desc)
|
||||
|
||||
p := NewBuffer(nil)
|
||||
// If e.value has type T, the encoder expects a *struct{ X T }.
|
||||
// Pass a *T with a zero field and hope it all works out.
|
||||
x := reflect.New(et)
|
||||
x.Elem().Set(reflect.ValueOf(e.value))
|
||||
if err := props.enc(p, props, toStructPointer(x)); err != nil {
|
||||
return err
|
||||
}
|
||||
e.enc = p.buf
|
||||
m[k] = e
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func sizeExtensionMap(m map[int32]Extension) (n int) {
|
||||
for _, e := range m {
|
||||
if e.value == nil || e.desc == nil {
|
||||
// Extension is only in its encoded form.
|
||||
n += len(e.enc)
|
||||
continue
|
||||
}
|
||||
|
||||
// We don't skip extensions that have an encoded form set,
|
||||
// because the extension value may have been mutated after
|
||||
// the last time this function was called.
|
||||
|
||||
et := reflect.TypeOf(e.desc.ExtensionType)
|
||||
props := extensionProperties(e.desc)
|
||||
|
||||
// If e.value has type T, the encoder expects a *struct{ X T }.
|
||||
// Pass a *T with a zero field and hope it all works out.
|
||||
x := reflect.New(et)
|
||||
x.Elem().Set(reflect.ValueOf(e.value))
|
||||
n += props.size(props, toStructPointer(x))
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// HasExtension returns whether the given extension is present in pb.
|
||||
func HasExtension(pb extendableProto, extension *ExtensionDesc) bool {
|
||||
// TODO: Check types, field numbers, etc.?
|
||||
_, ok := pb.ExtensionMap()[extension.Field]
|
||||
return ok
|
||||
}
|
||||
|
||||
// ClearExtension removes the given extension from pb.
|
||||
func ClearExtension(pb extendableProto, extension *ExtensionDesc) {
|
||||
// TODO: Check types, field numbers, etc.?
|
||||
delete(pb.ExtensionMap(), extension.Field)
|
||||
}
|
||||
|
||||
// GetExtension parses and returns the given extension of pb.
|
||||
// If the extension is not present and has no default value it returns ErrMissingExtension.
|
||||
func GetExtension(pb extendableProto, extension *ExtensionDesc) (interface{}, error) {
|
||||
if err := checkExtensionTypes(pb, extension); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
emap := pb.ExtensionMap()
|
||||
e, ok := emap[extension.Field]
|
||||
if !ok {
|
||||
// defaultExtensionValue returns the default value or
|
||||
// ErrMissingExtension if there is no default.
|
||||
return defaultExtensionValue(extension)
|
||||
}
|
||||
|
||||
if e.value != nil {
|
||||
// Already decoded. Check the descriptor, though.
|
||||
if e.desc != extension {
|
||||
// This shouldn't happen. If it does, it means that
|
||||
// GetExtension was called twice with two different
|
||||
// descriptors with the same field number.
|
||||
return nil, errors.New("proto: descriptor conflict")
|
||||
}
|
||||
return e.value, nil
|
||||
}
|
||||
|
||||
v, err := decodeExtension(e.enc, extension)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Remember the decoded version and drop the encoded version.
|
||||
// That way it is safe to mutate what we return.
|
||||
e.value = v
|
||||
e.desc = extension
|
||||
e.enc = nil
|
||||
emap[extension.Field] = e
|
||||
return e.value, nil
|
||||
}
|
||||
|
||||
// defaultExtensionValue returns the default value for extension.
|
||||
// If no default for an extension is defined ErrMissingExtension is returned.
|
||||
func defaultExtensionValue(extension *ExtensionDesc) (interface{}, error) {
|
||||
t := reflect.TypeOf(extension.ExtensionType)
|
||||
props := extensionProperties(extension)
|
||||
|
||||
sf, _, err := fieldDefault(t, props)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if sf == nil || sf.value == nil {
|
||||
// There is no default value.
|
||||
return nil, ErrMissingExtension
|
||||
}
|
||||
|
||||
if t.Kind() != reflect.Ptr {
|
||||
// We do not need to return a Ptr, we can directly return sf.value.
|
||||
return sf.value, nil
|
||||
}
|
||||
|
||||
// We need to return an interface{} that is a pointer to sf.value.
|
||||
value := reflect.New(t).Elem()
|
||||
value.Set(reflect.New(value.Type().Elem()))
|
||||
if sf.kind == reflect.Int32 {
|
||||
// We may have an int32 or an enum, but the underlying data is int32.
|
||||
// Since we can't set an int32 into a non int32 reflect.value directly
|
||||
// set it as a int32.
|
||||
value.Elem().SetInt(int64(sf.value.(int32)))
|
||||
} else {
|
||||
value.Elem().Set(reflect.ValueOf(sf.value))
|
||||
}
|
||||
return value.Interface(), nil
|
||||
}
|
||||
|
||||
// decodeExtension decodes an extension encoded in b.
|
||||
func decodeExtension(b []byte, extension *ExtensionDesc) (interface{}, error) {
|
||||
o := NewBuffer(b)
|
||||
|
||||
t := reflect.TypeOf(extension.ExtensionType)
|
||||
rep := extension.repeated()
|
||||
|
||||
props := extensionProperties(extension)
|
||||
|
||||
// t is a pointer to a struct, pointer to basic type or a slice.
|
||||
// Allocate a "field" to store the pointer/slice itself; the
|
||||
// pointer/slice will be stored here. We pass
|
||||
// the address of this field to props.dec.
|
||||
// This passes a zero field and a *t and lets props.dec
|
||||
// interpret it as a *struct{ x t }.
|
||||
value := reflect.New(t).Elem()
|
||||
|
||||
for {
|
||||
// Discard wire type and field number varint. It isn't needed.
|
||||
if _, err := o.DecodeVarint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := props.dec(o, props, toStructPointer(value.Addr())); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if !rep || o.index >= len(o.buf) {
|
||||
break
|
||||
}
|
||||
}
|
||||
return value.Interface(), nil
|
||||
}
|
||||
|
||||
// GetExtensions returns a slice of the extensions present in pb that are also listed in es.
|
||||
// The returned slice has the same length as es; missing extensions will appear as nil elements.
|
||||
func GetExtensions(pb Message, es []*ExtensionDesc) (extensions []interface{}, err error) {
|
||||
epb, ok := pb.(extendableProto)
|
||||
if !ok {
|
||||
err = errors.New("proto: not an extendable proto")
|
||||
return
|
||||
}
|
||||
extensions = make([]interface{}, len(es))
|
||||
for i, e := range es {
|
||||
extensions[i], err = GetExtension(epb, e)
|
||||
if err == ErrMissingExtension {
|
||||
err = nil
|
||||
}
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// SetExtension sets the specified extension of pb to the specified value.
|
||||
func SetExtension(pb extendableProto, extension *ExtensionDesc, value interface{}) error {
|
||||
if err := checkExtensionTypes(pb, extension); err != nil {
|
||||
return err
|
||||
}
|
||||
typ := reflect.TypeOf(extension.ExtensionType)
|
||||
if typ != reflect.TypeOf(value) {
|
||||
return errors.New("proto: bad extension value type")
|
||||
}
|
||||
// nil extension values need to be caught early, because the
|
||||
// encoder can't distinguish an ErrNil due to a nil extension
|
||||
// from an ErrNil due to a missing field. Extensions are
|
||||
// always optional, so the encoder would just swallow the error
|
||||
// and drop all the extensions from the encoded message.
|
||||
if reflect.ValueOf(value).IsNil() {
|
||||
return fmt.Errorf("proto: SetExtension called with nil value of type %T", value)
|
||||
}
|
||||
|
||||
pb.ExtensionMap()[extension.Field] = Extension{desc: extension, value: value}
|
||||
return nil
|
||||
}
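A sketch of the SetExtension/GetExtension round trip; the extMsg type and the eName descriptor below are illustrative stand-ins for generated code, using an extension field number inside the declared range.

package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
)

// extMsg is a hand-written stand-in for a generated extendable message.
type extMsg struct {
	XXX_extensions map[int32]proto.Extension
}

func (m *extMsg) Reset()         { *m = extMsg{} }
func (m *extMsg) String() string { return "extMsg" }
func (*extMsg) ProtoMessage()    {}
func (*extMsg) ExtensionRangeArray() []proto.ExtensionRange {
	return []proto.ExtensionRange{{Start: 100, End: 200}}
}
func (m *extMsg) ExtensionMap() map[int32]proto.Extension {
	if m.XXX_extensions == nil {
		m.XXX_extensions = make(map[int32]proto.Extension)
	}
	return m.XXX_extensions
}

// eName describes a hypothetical string extension with field number 123.
var eName = &proto.ExtensionDesc{
	ExtendedType:  (*extMsg)(nil),
	ExtensionType: (*string)(nil),
	Field:         123,
	Name:          "example.name",
	Tag:           "bytes,123,opt,name=name",
}

func main() {
	m := &extMsg{}
	if err := proto.SetExtension(m, eName, proto.String("fred")); err != nil {
		fmt.Println("set:", err)
		return
	}
	v, err := proto.GetExtension(m, eName)
	fmt.Println(*(v.(*string)), err) // fred <nil>
}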
|
||||
|
||||
// A global registry of extensions.
|
||||
// The generated code will register the generated descriptors by calling RegisterExtension.
|
||||
|
||||
var extensionMaps = make(map[reflect.Type]map[int32]*ExtensionDesc)
|
||||
|
||||
// RegisterExtension is called from the generated code.
|
||||
func RegisterExtension(desc *ExtensionDesc) {
|
||||
st := reflect.TypeOf(desc.ExtendedType).Elem()
|
||||
m := extensionMaps[st]
|
||||
if m == nil {
|
||||
m = make(map[int32]*ExtensionDesc)
|
||||
extensionMaps[st] = m
|
||||
}
|
||||
if _, ok := m[desc.Field]; ok {
|
||||
panic("proto: duplicate extension registered: " + st.String() + " " + strconv.Itoa(int(desc.Field)))
|
||||
}
|
||||
m[desc.Field] = desc
|
||||
}
|
||||
|
||||
// RegisteredExtensions returns a map of the registered extensions of a
|
||||
// protocol buffer struct, indexed by the extension number.
|
||||
// The argument pb should be a nil pointer to the struct type.
|
||||
func RegisteredExtensions(pb Message) map[int32]*ExtensionDesc {
|
||||
return extensionMaps[reflect.TypeOf(pb).Elem()]
|
||||
}
883 Godeps/_workspace/src/github.com/golang/protobuf/proto/lib.go generated vendored Normal file
@@ -0,0 +1,883 @@
// Go support for Protocol Buffers - Google's data interchange format
|
||||
//
|
||||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// https://github.com/golang/protobuf
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
/*
|
||||
Package proto converts data structures to and from the wire format of
|
||||
protocol buffers. It works in concert with the Go source code generated
|
||||
for .proto files by the protocol compiler.
|
||||
|
||||
A summary of the properties of the protocol buffer interface
|
||||
for a protocol buffer variable v:
|
||||
|
||||
- Names are turned from camel_case to CamelCase for export.
|
||||
- There are no methods on v to set fields; just treat
|
||||
them as structure fields.
|
||||
- There are getters that return a field's value if set,
|
||||
and return the field's default value if unset.
|
||||
The getters work even if the receiver is a nil message.
|
||||
- The zero value for a struct is its correct initialization state.
|
||||
All desired fields must be set before marshaling.
|
||||
- A Reset() method will restore a protobuf struct to its zero state.
|
||||
- Non-repeated fields are pointers to the values; nil means unset.
|
||||
That is, optional or required field int32 f becomes F *int32.
|
||||
- Repeated fields are slices.
|
||||
- Helper functions are available to aid the setting of fields.
|
||||
msg.Foo = proto.String("hello") // set field
|
||||
- Constants are defined to hold the default values of all fields that
|
||||
have them. They have the form Default_StructName_FieldName.
|
||||
Because the getter methods handle defaulted values,
|
||||
direct use of these constants should be rare.
|
||||
- Enums are given type names and maps from names to values.
|
||||
Enum values are prefixed by the enclosing message's name, or by the
|
||||
enum's type name if it is a top-level enum. Enum types have a String
|
||||
  method, and an Enum method to assist in message construction.
|
||||
- Nested messages, groups and enums have type names prefixed with the name of
|
||||
the surrounding message type.
|
||||
- Extensions are given descriptor names that start with E_,
|
||||
followed by an underscore-delimited list of the nested messages
|
||||
that contain it (if any) followed by the CamelCased name of the
|
||||
extension field itself. HasExtension, ClearExtension, GetExtension
|
||||
and SetExtension are functions for manipulating extensions.
|
||||
- Oneof field sets are given a single field in their message,
|
||||
with distinguished wrapper types for each possible field value.
|
||||
- Marshal and Unmarshal are functions to encode and decode the wire format.
|
||||
|
||||
The simplest way to describe this is to see an example.
|
||||
Given file test.proto, containing
|
||||
|
||||
package example;
|
||||
|
||||
enum FOO { X = 17; }
|
||||
|
||||
message Test {
|
||||
required string label = 1;
|
||||
optional int32 type = 2 [default=77];
|
||||
repeated int64 reps = 3;
|
||||
optional group OptionalGroup = 4 {
|
||||
required string RequiredField = 5;
|
||||
}
|
||||
oneof union {
|
||||
int32 number = 6;
|
||||
string name = 7;
|
||||
}
|
||||
}
|
||||
|
||||
The resulting file, test.pb.go, is:
|
||||
|
||||
package example
|
||||
|
||||
import proto "github.com/golang/protobuf/proto"
|
||||
import math "math"
|
||||
|
||||
type FOO int32
|
||||
const (
|
||||
FOO_X FOO = 17
|
||||
)
|
||||
var FOO_name = map[int32]string{
|
||||
17: "X",
|
||||
}
|
||||
var FOO_value = map[string]int32{
|
||||
"X": 17,
|
||||
}
|
||||
|
||||
func (x FOO) Enum() *FOO {
|
||||
p := new(FOO)
|
||||
*p = x
|
||||
return p
|
||||
}
|
||||
func (x FOO) String() string {
|
||||
return proto.EnumName(FOO_name, int32(x))
|
||||
}
|
||||
func (x *FOO) UnmarshalJSON(data []byte) error {
|
||||
value, err := proto.UnmarshalJSONEnum(FOO_value, data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
*x = FOO(value)
|
||||
return nil
|
||||
}
|
||||
|
||||
type Test struct {
|
||||
Label *string `protobuf:"bytes,1,req,name=label" json:"label,omitempty"`
|
||||
Type *int32 `protobuf:"varint,2,opt,name=type,def=77" json:"type,omitempty"`
|
||||
Reps []int64 `protobuf:"varint,3,rep,name=reps" json:"reps,omitempty"`
|
||||
Optionalgroup *Test_OptionalGroup `protobuf:"group,4,opt,name=OptionalGroup" json:"optionalgroup,omitempty"`
|
||||
// Types that are valid to be assigned to Union:
|
||||
// *Test_Number
|
||||
// *Test_Name
|
||||
Union isTest_Union `protobuf_oneof:"union"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
}
|
||||
func (m *Test) Reset() { *m = Test{} }
|
||||
func (m *Test) String() string { return proto.CompactTextString(m) }
|
||||
func (*Test) ProtoMessage() {}
|
||||
|
||||
type isTest_Union interface {
|
||||
isTest_Union()
|
||||
}
|
||||
|
||||
type Test_Number struct {
|
||||
Number int32 `protobuf:"varint,6,opt,name=number"`
|
||||
}
|
||||
type Test_Name struct {
|
||||
Name string `protobuf:"bytes,7,opt,name=name"`
|
||||
}
|
||||
|
||||
func (*Test_Number) isTest_Union() {}
|
||||
func (*Test_Name) isTest_Union() {}
|
||||
|
||||
func (m *Test) GetUnion() isTest_Union {
|
||||
if m != nil {
|
||||
return m.Union
|
||||
}
|
||||
return nil
|
||||
}
|
||||
const Default_Test_Type int32 = 77
|
||||
|
||||
func (m *Test) GetLabel() string {
|
||||
if m != nil && m.Label != nil {
|
||||
return *m.Label
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *Test) GetType() int32 {
|
||||
if m != nil && m.Type != nil {
|
||||
return *m.Type
|
||||
}
|
||||
return Default_Test_Type
|
||||
}
|
||||
|
||||
func (m *Test) GetOptionalgroup() *Test_OptionalGroup {
|
||||
if m != nil {
|
||||
return m.Optionalgroup
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type Test_OptionalGroup struct {
|
||||
RequiredField *string `protobuf:"bytes,5,req" json:"RequiredField,omitempty"`
|
||||
}
|
||||
func (m *Test_OptionalGroup) Reset() { *m = Test_OptionalGroup{} }
|
||||
func (m *Test_OptionalGroup) String() string { return proto.CompactTextString(m) }
|
||||
|
||||
func (m *Test_OptionalGroup) GetRequiredField() string {
|
||||
if m != nil && m.RequiredField != nil {
|
||||
return *m.RequiredField
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *Test) GetNumber() int32 {
|
||||
if x, ok := m.GetUnion().(*Test_Number); ok {
|
||||
return x.Number
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *Test) GetName() string {
|
||||
if x, ok := m.GetUnion().(*Test_Name); ok {
|
||||
return x.Name
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func init() {
|
||||
proto.RegisterEnum("example.FOO", FOO_name, FOO_value)
|
||||
}
|
||||
|
||||
To create and play with a Test object:
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"log"
|
||||
|
||||
"github.com/golang/protobuf/proto"
|
||||
pb "./example.pb"
|
||||
)
|
||||
|
||||
func main() {
|
||||
test := &pb.Test{
|
||||
Label: proto.String("hello"),
|
||||
Type: proto.Int32(17),
|
||||
Optionalgroup: &pb.Test_OptionalGroup{
|
||||
RequiredField: proto.String("good bye"),
|
||||
},
|
||||
Union: &pb.Test_Name{"fred"},
|
||||
}
|
||||
data, err := proto.Marshal(test)
|
||||
if err != nil {
|
||||
log.Fatal("marshaling error: ", err)
|
||||
}
|
||||
newTest := &pb.Test{}
|
||||
err = proto.Unmarshal(data, newTest)
|
||||
if err != nil {
|
||||
log.Fatal("unmarshaling error: ", err)
|
||||
}
|
||||
// Now test and newTest contain the same data.
|
||||
if test.GetLabel() != newTest.GetLabel() {
|
||||
log.Fatalf("data mismatch %q != %q", test.GetLabel(), newTest.GetLabel())
|
||||
}
|
||||
// Use a type switch to determine which oneof was set.
|
||||
switch u := test.Union.(type) {
|
||||
case *pb.Test_Number: // u.Number contains the number.
|
||||
case *pb.Test_Name: // u.Name contains the string.
|
||||
}
|
||||
// etc.
|
||||
}
|
||||
*/
|
||||
package proto
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strconv"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// Message is implemented by generated protocol buffer messages.
|
||||
type Message interface {
|
||||
Reset()
|
||||
String() string
|
||||
ProtoMessage()
|
||||
}
|
||||
|
||||
// Stats records allocation details about the protocol buffer encoders
|
||||
// and decoders. Useful for tuning the library itself.
|
||||
type Stats struct {
|
||||
Emalloc uint64 // mallocs in encode
|
||||
Dmalloc uint64 // mallocs in decode
|
||||
Encode uint64 // number of encodes
|
||||
Decode uint64 // number of decodes
|
||||
Chit uint64 // number of cache hits
|
||||
Cmiss uint64 // number of cache misses
|
||||
Size uint64 // number of sizes
|
||||
}
|
||||
|
||||
// Set to true to enable stats collection.
|
||||
const collectStats = false
|
||||
|
||||
var stats Stats
|
||||
|
||||
// GetStats returns a copy of the global Stats structure.
|
||||
func GetStats() Stats { return stats }
|
||||
|
||||
// A Buffer is a buffer manager for marshaling and unmarshaling
|
||||
// protocol buffers. It may be reused between invocations to
|
||||
// reduce memory usage. It is not necessary to use a Buffer;
|
||||
// the global functions Marshal and Unmarshal create a
|
||||
// temporary Buffer and are fine for most applications.
|
||||
type Buffer struct {
|
||||
buf []byte // encode/decode byte stream
|
||||
index int // write point
|
||||
|
||||
// pools of basic types to amortize allocation.
|
||||
bools []bool
|
||||
uint32s []uint32
|
||||
uint64s []uint64
|
||||
|
||||
// extra pools, only used with pointer_reflect.go
|
||||
int32s []int32
|
||||
int64s []int64
|
||||
float32s []float32
|
||||
float64s []float64
|
||||
}
|
||||
|
||||
// NewBuffer allocates a new Buffer and initializes its internal data to
|
||||
// the contents of the argument slice.
|
||||
func NewBuffer(e []byte) *Buffer {
|
||||
return &Buffer{buf: e}
|
||||
}
|
||||
|
||||
// Reset resets the Buffer, ready for marshaling a new protocol buffer.
|
||||
func (p *Buffer) Reset() {
|
||||
p.buf = p.buf[0:0] // for reading/writing
|
||||
p.index = 0 // for reading
|
||||
}
|
||||
|
||||
// SetBuf replaces the internal buffer with the slice,
|
||||
// ready for unmarshaling the contents of the slice.
|
||||
func (p *Buffer) SetBuf(s []byte) {
|
||||
p.buf = s
|
||||
p.index = 0
|
||||
}
|
||||
|
||||
// Bytes returns the contents of the Buffer.
|
||||
func (p *Buffer) Bytes() []byte { return p.buf }
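// Illustrative sketch, not part of the original file: reusing one Buffer to
// encode many messages, the allocation-amortizing pattern the type comment
// above describes. Buffer.Marshal (defined in encode.go) appends the wire
// encoding of pb to the buffer; send is a hypothetical consumer of the bytes.
//
//	buf := NewBuffer(nil)
//	for _, msg := range msgs {
//		buf.Reset()
//		if err := buf.Marshal(msg); err != nil {
//			log.Fatal(err)
//		}
//		send(buf.Bytes())
//	}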
|
||||
|
||||
/*
|
||||
* Helper routines for simplifying the creation of optional fields of basic type.
|
||||
*/
|
||||
|
||||
// Bool is a helper routine that allocates a new bool value
|
||||
// to store v and returns a pointer to it.
|
||||
func Bool(v bool) *bool {
|
||||
return &v
|
||||
}
|
||||
|
||||
// Int32 is a helper routine that allocates a new int32 value
|
||||
// to store v and returns a pointer to it.
|
||||
func Int32(v int32) *int32 {
|
||||
return &v
|
||||
}
|
||||
|
||||
// Int is a helper routine that allocates a new int32 value
|
||||
// to store v and returns a pointer to it, but unlike Int32
|
||||
// its argument value is an int.
|
||||
func Int(v int) *int32 {
|
||||
p := new(int32)
|
||||
*p = int32(v)
|
||||
return p
|
||||
}
|
||||
|
||||
// Int64 is a helper routine that allocates a new int64 value
|
||||
// to store v and returns a pointer to it.
|
||||
func Int64(v int64) *int64 {
|
||||
return &v
|
||||
}
|
||||
|
||||
// Float32 is a helper routine that allocates a new float32 value
|
||||
// to store v and returns a pointer to it.
|
||||
func Float32(v float32) *float32 {
|
||||
return &v
|
||||
}
|
||||
|
||||
// Float64 is a helper routine that allocates a new float64 value
|
||||
// to store v and returns a pointer to it.
|
||||
func Float64(v float64) *float64 {
|
||||
return &v
|
||||
}
|
||||
|
||||
// Uint32 is a helper routine that allocates a new uint32 value
|
||||
// to store v and returns a pointer to it.
|
||||
func Uint32(v uint32) *uint32 {
|
||||
return &v
|
||||
}
|
||||
|
||||
// Uint64 is a helper routine that allocates a new uint64 value
|
||||
// to store v and returns a pointer to it.
|
||||
func Uint64(v uint64) *uint64 {
|
||||
return &v
|
||||
}
|
||||
|
||||
// String is a helper routine that allocates a new string value
|
||||
// to store v and returns a pointer to it.
|
||||
func String(v string) *string {
|
||||
return &v
|
||||
}
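// Illustrative sketch, not part of the original file: the helpers above let a
// client set optional scalar fields, which are generated as pointer fields,
// inline. pb.Test is the generated type from the package documentation.
//
//	msg := &pb.Test{
//		Label: proto.String("hello"),
//		Type:  proto.Int32(17),
//	}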
|
||||
|
||||
// EnumName is a helper function to simplify printing protocol buffer enums
|
||||
// by name. Given an enum map and a value, it returns a useful string.
|
||||
func EnumName(m map[int32]string, v int32) string {
|
||||
s, ok := m[v]
|
||||
if ok {
|
||||
return s
|
||||
}
|
||||
return strconv.Itoa(int(v))
|
||||
}
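// For example, with the FOO enum from the package documentation:
//
//	EnumName(FOO_name, 17) // "X"
//	EnumName(FOO_name, 99) // "99": unknown values fall back to the number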
|
||||
|
||||
// UnmarshalJSONEnum is a helper function to simplify recovering enum int values
|
||||
// from their JSON-encoded representation. Given a map from the enum's symbolic
|
||||
// names to its int values, and a byte buffer containing the JSON-encoded
|
||||
// value, it returns an int32 that can be cast to the enum type by the caller.
|
||||
//
|
||||
// The function can deal with both JSON representations, numeric and symbolic.
|
||||
func UnmarshalJSONEnum(m map[string]int32, data []byte, enumName string) (int32, error) {
|
||||
if data[0] == '"' {
|
||||
// New style: enums are strings.
|
||||
var repr string
|
||||
if err := json.Unmarshal(data, &repr); err != nil {
|
||||
return -1, err
|
||||
}
|
||||
val, ok := m[repr]
|
||||
if !ok {
|
||||
return 0, fmt.Errorf("unrecognized enum %s value %q", enumName, repr)
|
||||
}
|
||||
return val, nil
|
||||
}
|
||||
// Old style: enums are ints.
|
||||
var val int32
|
||||
if err := json.Unmarshal(data, &val); err != nil {
|
||||
return 0, fmt.Errorf("cannot unmarshal %#q into enum %s", data, enumName)
|
||||
}
|
||||
return val, nil
|
||||
}
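// For example, with the FOO enum from the package documentation, both the
// symbolic and the numeric JSON forms decode to 17:
//
//	v, _ := UnmarshalJSONEnum(FOO_value, []byte(`"X"`), "FOO") // v == 17
//	v, _ = UnmarshalJSONEnum(FOO_value, []byte(`17`), "FOO")   // v == 17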
|
||||
|
||||
// DebugPrint dumps the encoded data in b in a debugging format with a header
|
||||
// including the string s. Used in testing but made available for general debugging.
|
||||
func (p *Buffer) DebugPrint(s string, b []byte) {
|
||||
var u uint64
|
||||
|
||||
obuf := p.buf
|
||||
index := p.index
|
||||
p.buf = b
|
||||
p.index = 0
|
||||
depth := 0
|
||||
|
||||
fmt.Printf("\n--- %s ---\n", s)
|
||||
|
||||
out:
|
||||
for {
|
||||
for i := 0; i < depth; i++ {
|
||||
fmt.Print(" ")
|
||||
}
|
||||
|
||||
index := p.index
|
||||
if index == len(p.buf) {
|
||||
break
|
||||
}
|
||||
|
||||
op, err := p.DecodeVarint()
|
||||
if err != nil {
|
||||
fmt.Printf("%3d: fetching op err %v\n", index, err)
|
||||
break out
|
||||
}
|
||||
tag := op >> 3
|
||||
wire := op & 7
|
||||
|
||||
switch wire {
|
||||
default:
|
||||
fmt.Printf("%3d: t=%3d unknown wire=%d\n",
|
||||
index, tag, wire)
|
||||
break out
|
||||
|
||||
case WireBytes:
|
||||
var r []byte
|
||||
|
||||
r, err = p.DecodeRawBytes(false)
|
||||
if err != nil {
|
||||
break out
|
||||
}
|
||||
fmt.Printf("%3d: t=%3d bytes [%d]", index, tag, len(r))
|
||||
if len(r) <= 6 {
|
||||
for i := 0; i < len(r); i++ {
|
||||
fmt.Printf(" %.2x", r[i])
|
||||
}
|
||||
} else {
|
||||
for i := 0; i < 3; i++ {
|
||||
fmt.Printf(" %.2x", r[i])
|
||||
}
|
||||
fmt.Printf(" ..")
|
||||
for i := len(r) - 3; i < len(r); i++ {
|
||||
fmt.Printf(" %.2x", r[i])
|
||||
}
|
||||
}
|
||||
fmt.Printf("\n")
|
||||
|
||||
case WireFixed32:
|
||||
u, err = p.DecodeFixed32()
|
||||
if err != nil {
|
||||
fmt.Printf("%3d: t=%3d fix32 err %v\n", index, tag, err)
|
||||
break out
|
||||
}
|
||||
fmt.Printf("%3d: t=%3d fix32 %d\n", index, tag, u)
|
||||
|
||||
case WireFixed64:
|
||||
u, err = p.DecodeFixed64()
|
||||
if err != nil {
|
||||
fmt.Printf("%3d: t=%3d fix64 err %v\n", index, tag, err)
|
||||
break out
|
||||
}
|
||||
fmt.Printf("%3d: t=%3d fix64 %d\n", index, tag, u)
|
||||
|
||||
case WireVarint:
|
||||
u, err = p.DecodeVarint()
|
||||
if err != nil {
|
||||
fmt.Printf("%3d: t=%3d varint err %v\n", index, tag, err)
|
||||
break out
|
||||
}
|
||||
fmt.Printf("%3d: t=%3d varint %d\n", index, tag, u)
|
||||
|
||||
case WireStartGroup:
|
||||
fmt.Printf("%3d: t=%3d start\n", index, tag)
|
||||
depth++
|
||||
|
||||
case WireEndGroup:
|
||||
depth--
|
||||
fmt.Printf("%3d: t=%3d end\n", index, tag)
|
||||
}
|
||||
}
|
||||
|
||||
if depth != 0 {
|
||||
fmt.Printf("%3d: start-end not balanced %d\n", p.index, depth)
|
||||
}
|
||||
fmt.Printf("\n")
|
||||
|
||||
p.buf = obuf
|
||||
p.index = index
|
||||
}
|
||||
|
||||
// SetDefaults sets unset protocol buffer fields to their default values.
|
||||
// It only modifies fields that are both unset and have defined defaults.
|
||||
// It recursively sets default values in any non-nil sub-messages.
|
||||
func SetDefaults(pb Message) {
|
||||
setDefaults(reflect.ValueOf(pb), true, false)
|
||||
}
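// Illustrative only, using the Test message from the package documentation:
//
//	m := &pb.Test{Label: proto.String("x")}
//	proto.SetDefaults(m)
//	// m.Type now points to 77, the default declared in test.proto.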
|
||||
|
||||
// v is a pointer to a struct.
|
||||
func setDefaults(v reflect.Value, recur, zeros bool) {
|
||||
v = v.Elem()
|
||||
|
||||
defaultMu.RLock()
|
||||
dm, ok := defaults[v.Type()]
|
||||
defaultMu.RUnlock()
|
||||
if !ok {
|
||||
dm = buildDefaultMessage(v.Type())
|
||||
defaultMu.Lock()
|
||||
defaults[v.Type()] = dm
|
||||
defaultMu.Unlock()
|
||||
}
|
||||
|
||||
for _, sf := range dm.scalars {
|
||||
f := v.Field(sf.index)
|
||||
if !f.IsNil() {
|
||||
// field already set
|
||||
continue
|
||||
}
|
||||
dv := sf.value
|
||||
if dv == nil && !zeros {
|
||||
// no explicit default, and don't want to set zeros
|
||||
continue
|
||||
}
|
||||
fptr := f.Addr().Interface() // **T
|
||||
// TODO: Consider batching the allocations we do here.
|
||||
switch sf.kind {
|
||||
case reflect.Bool:
|
||||
b := new(bool)
|
||||
if dv != nil {
|
||||
*b = dv.(bool)
|
||||
}
|
||||
*(fptr.(**bool)) = b
|
||||
case reflect.Float32:
|
||||
f := new(float32)
|
||||
if dv != nil {
|
||||
*f = dv.(float32)
|
||||
}
|
||||
*(fptr.(**float32)) = f
|
||||
case reflect.Float64:
|
||||
f := new(float64)
|
||||
if dv != nil {
|
||||
*f = dv.(float64)
|
||||
}
|
||||
*(fptr.(**float64)) = f
|
||||
case reflect.Int32:
|
||||
// might be an enum
|
||||
if ft := f.Type(); ft != int32PtrType {
|
||||
// enum
|
||||
f.Set(reflect.New(ft.Elem()))
|
||||
if dv != nil {
|
||||
f.Elem().SetInt(int64(dv.(int32)))
|
||||
}
|
||||
} else {
|
||||
// int32 field
|
||||
i := new(int32)
|
||||
if dv != nil {
|
||||
*i = dv.(int32)
|
||||
}
|
||||
*(fptr.(**int32)) = i
|
||||
}
|
||||
case reflect.Int64:
|
||||
i := new(int64)
|
||||
if dv != nil {
|
||||
*i = dv.(int64)
|
||||
}
|
||||
*(fptr.(**int64)) = i
|
||||
case reflect.String:
|
||||
s := new(string)
|
||||
if dv != nil {
|
||||
*s = dv.(string)
|
||||
}
|
||||
*(fptr.(**string)) = s
|
||||
case reflect.Uint8:
|
||||
// exceptional case: []byte
|
||||
var b []byte
|
||||
if dv != nil {
|
||||
db := dv.([]byte)
|
||||
b = make([]byte, len(db))
|
||||
copy(b, db)
|
||||
} else {
|
||||
b = []byte{}
|
||||
}
|
||||
*(fptr.(*[]byte)) = b
|
||||
case reflect.Uint32:
|
||||
u := new(uint32)
|
||||
if dv != nil {
|
||||
*u = dv.(uint32)
|
||||
}
|
||||
*(fptr.(**uint32)) = u
|
||||
case reflect.Uint64:
|
||||
u := new(uint64)
|
||||
if dv != nil {
|
||||
*u = dv.(uint64)
|
||||
}
|
||||
*(fptr.(**uint64)) = u
|
||||
default:
|
||||
log.Printf("proto: can't set default for field %v (sf.kind=%v)", f, sf.kind)
|
||||
}
|
||||
}
|
||||
|
||||
for _, ni := range dm.nested {
|
||||
f := v.Field(ni)
|
||||
// f is *T or []*T or map[T]*T
|
||||
switch f.Kind() {
|
||||
case reflect.Ptr:
|
||||
if f.IsNil() {
|
||||
continue
|
||||
}
|
||||
setDefaults(f, recur, zeros)
|
||||
|
||||
case reflect.Slice:
|
||||
for i := 0; i < f.Len(); i++ {
|
||||
e := f.Index(i)
|
||||
if e.IsNil() {
|
||||
continue
|
||||
}
|
||||
setDefaults(e, recur, zeros)
|
||||
}
|
||||
|
||||
case reflect.Map:
|
||||
for _, k := range f.MapKeys() {
|
||||
e := f.MapIndex(k)
|
||||
if e.IsNil() {
|
||||
continue
|
||||
}
|
||||
setDefaults(e, recur, zeros)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var (
|
||||
// defaults maps a protocol buffer struct type to a slice of the fields,
|
||||
// with its scalar fields set to their proto-declared non-zero default values.
|
||||
defaultMu sync.RWMutex
|
||||
defaults = make(map[reflect.Type]defaultMessage)
|
||||
|
||||
int32PtrType = reflect.TypeOf((*int32)(nil))
|
||||
)
|
||||
|
||||
// defaultMessage represents information about the default values of a message.
|
||||
type defaultMessage struct {
|
||||
scalars []scalarField
|
||||
nested []int // struct field index of nested messages
|
||||
}
|
||||
|
||||
type scalarField struct {
|
||||
index int // struct field index
|
||||
kind reflect.Kind // element type (the T in *T or []T)
|
||||
value interface{} // the proto-declared default value, or nil
|
||||
}
|
||||
|
||||
// t is a struct type.
|
||||
func buildDefaultMessage(t reflect.Type) (dm defaultMessage) {
|
||||
sprop := GetProperties(t)
|
||||
for _, prop := range sprop.Prop {
|
||||
fi, ok := sprop.decoderTags.get(prop.Tag)
|
||||
if !ok {
|
||||
// XXX_unrecognized
|
||||
continue
|
||||
}
|
||||
ft := t.Field(fi).Type
|
||||
|
||||
sf, nested, err := fieldDefault(ft, prop)
|
||||
switch {
|
||||
case err != nil:
|
||||
log.Print(err)
|
||||
case nested:
|
||||
dm.nested = append(dm.nested, fi)
|
||||
case sf != nil:
|
||||
sf.index = fi
|
||||
dm.scalars = append(dm.scalars, *sf)
|
||||
}
|
||||
}
|
||||
|
||||
return dm
|
||||
}
|
||||
|
||||
// fieldDefault returns the scalarField for field type ft.
|
||||
// sf will be nil if the field can not have a default.
|
||||
// nestedMessage will be true if this is a nested message.
|
||||
// Note that sf.index is not set on return.
|
||||
func fieldDefault(ft reflect.Type, prop *Properties) (sf *scalarField, nestedMessage bool, err error) {
|
||||
var canHaveDefault bool
|
||||
switch ft.Kind() {
|
||||
case reflect.Ptr:
|
||||
if ft.Elem().Kind() == reflect.Struct {
|
||||
nestedMessage = true
|
||||
} else {
|
||||
canHaveDefault = true // proto2 scalar field
|
||||
}
|
||||
|
||||
case reflect.Slice:
|
||||
switch ft.Elem().Kind() {
|
||||
case reflect.Ptr:
|
||||
nestedMessage = true // repeated message
|
||||
case reflect.Uint8:
|
||||
canHaveDefault = true // bytes field
|
||||
}
|
||||
|
||||
case reflect.Map:
|
||||
if ft.Elem().Kind() == reflect.Ptr {
|
||||
nestedMessage = true // map with message values
|
||||
}
|
||||
}
|
||||
|
||||
if !canHaveDefault {
|
||||
if nestedMessage {
|
||||
return nil, true, nil
|
||||
}
|
||||
return nil, false, nil
|
||||
}
|
||||
|
||||
// We now know that ft is a pointer or slice.
|
||||
sf = &scalarField{kind: ft.Elem().Kind()}
|
||||
|
||||
// scalar fields without defaults
|
||||
if !prop.HasDefault {
|
||||
return sf, false, nil
|
||||
}
|
||||
|
||||
// a scalar field: either *T or []byte
|
||||
switch ft.Elem().Kind() {
|
||||
case reflect.Bool:
|
||||
x, err := strconv.ParseBool(prop.Default)
|
||||
if err != nil {
|
||||
return nil, false, fmt.Errorf("proto: bad default bool %q: %v", prop.Default, err)
|
||||
}
|
||||
sf.value = x
|
||||
case reflect.Float32:
|
||||
x, err := strconv.ParseFloat(prop.Default, 32)
|
||||
if err != nil {
|
||||
return nil, false, fmt.Errorf("proto: bad default float32 %q: %v", prop.Default, err)
|
||||
}
|
||||
sf.value = float32(x)
|
||||
case reflect.Float64:
|
||||
x, err := strconv.ParseFloat(prop.Default, 64)
|
||||
if err != nil {
|
||||
return nil, false, fmt.Errorf("proto: bad default float64 %q: %v", prop.Default, err)
|
||||
}
|
||||
sf.value = x
|
||||
case reflect.Int32:
|
||||
x, err := strconv.ParseInt(prop.Default, 10, 32)
|
||||
if err != nil {
|
||||
return nil, false, fmt.Errorf("proto: bad default int32 %q: %v", prop.Default, err)
|
||||
}
|
||||
sf.value = int32(x)
|
||||
case reflect.Int64:
|
||||
x, err := strconv.ParseInt(prop.Default, 10, 64)
|
||||
if err != nil {
|
||||
return nil, false, fmt.Errorf("proto: bad default int64 %q: %v", prop.Default, err)
|
||||
}
|
||||
sf.value = x
|
||||
case reflect.String:
|
||||
sf.value = prop.Default
|
||||
case reflect.Uint8:
|
||||
// []byte (not *uint8)
|
||||
sf.value = []byte(prop.Default)
|
||||
case reflect.Uint32:
|
||||
x, err := strconv.ParseUint(prop.Default, 10, 32)
|
||||
if err != nil {
|
||||
return nil, false, fmt.Errorf("proto: bad default uint32 %q: %v", prop.Default, err)
|
||||
}
|
||||
sf.value = uint32(x)
|
||||
case reflect.Uint64:
|
||||
x, err := strconv.ParseUint(prop.Default, 10, 64)
|
||||
if err != nil {
|
||||
return nil, false, fmt.Errorf("proto: bad default uint64 %q: %v", prop.Default, err)
|
||||
}
|
||||
sf.value = x
|
||||
default:
|
||||
return nil, false, fmt.Errorf("proto: unhandled def kind %v", ft.Elem().Kind())
|
||||
}
|
||||
|
||||
return sf, false, nil
|
||||
}
|
||||
|
||||
// Map fields may have key types of non-float scalars, strings and enums.
|
||||
// The easiest way to sort them in some deterministic order is to use fmt.
|
||||
// If this turns out to be inefficient we can always consider other options,
|
||||
// such as doing a Schwartzian transform.
|
||||
|
||||
func mapKeys(vs []reflect.Value) sort.Interface {
|
||||
s := mapKeySorter{
|
||||
vs: vs,
|
||||
// default Less function: textual comparison
|
||||
less: func(a, b reflect.Value) bool {
|
||||
return fmt.Sprint(a.Interface()) < fmt.Sprint(b.Interface())
|
||||
},
|
||||
}
|
||||
|
||||
// Type specialization per https://developers.google.com/protocol-buffers/docs/proto#maps;
|
||||
// numeric keys are sorted numerically.
|
||||
if len(vs) == 0 {
|
||||
return s
|
||||
}
|
||||
switch vs[0].Kind() {
|
||||
case reflect.Int32, reflect.Int64:
|
||||
s.less = func(a, b reflect.Value) bool { return a.Int() < b.Int() }
|
||||
case reflect.Uint32, reflect.Uint64:
|
||||
s.less = func(a, b reflect.Value) bool { return a.Uint() < b.Uint() }
|
||||
}
|
||||
|
||||
return s
|
||||
}
|
||||
|
||||
type mapKeySorter struct {
|
||||
vs []reflect.Value
|
||||
less func(a, b reflect.Value) bool
|
||||
}
|
||||
|
||||
func (s mapKeySorter) Len() int { return len(s.vs) }
|
||||
func (s mapKeySorter) Swap(i, j int) { s.vs[i], s.vs[j] = s.vs[j], s.vs[i] }
|
||||
func (s mapKeySorter) Less(i, j int) bool {
|
||||
return s.less(s.vs[i], s.vs[j])
|
||||
}
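// Illustrative only: how an encoder can use mapKeys to visit a map field's
// entries in a deterministic order (mv is a reflect.Value of kind Map).
//
//	keys := mv.MapKeys()
//	sort.Sort(mapKeys(keys))
//	for _, k := range keys {
//		_ = mv.MapIndex(k) // values visited in sorted key order
//	}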
|
||||
|
||||
// isProto3Zero reports whether v is a zero proto3 value.
|
||||
func isProto3Zero(v reflect.Value) bool {
|
||||
switch v.Kind() {
|
||||
case reflect.Bool:
|
||||
return !v.Bool()
|
||||
case reflect.Int32, reflect.Int64:
|
||||
return v.Int() == 0
|
||||
case reflect.Uint32, reflect.Uint64:
|
||||
return v.Uint() == 0
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return v.Float() == 0
|
||||
case reflect.String:
|
||||
return v.String() == ""
|
||||
}
|
||||
return false
|
||||
}
|
||||
287
Godeps/_workspace/src/github.com/golang/protobuf/proto/message_set.go
generated
vendored
Normal file
@@ -0,0 +1,287 @@
|
||||
// Go support for Protocol Buffers - Google's data interchange format
|
||||
//
|
||||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// https://github.com/golang/protobuf
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
package proto
|
||||
|
||||
/*
|
||||
* Support for message sets.
|
||||
*/
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"sort"
|
||||
)
|
||||
|
||||
// ErrNoMessageTypeId occurs when a protocol buffer does not have a message type ID.
|
||||
// A message type ID is required for storing a protocol buffer in a message set.
|
||||
var ErrNoMessageTypeId = errors.New("proto does not have a message type ID")
|
||||
|
||||
// The first two types (_MessageSet_Item and MessageSet)
|
||||
// model what the protocol compiler produces for the following protocol message:
|
||||
// message MessageSet {
|
||||
// repeated group Item = 1 {
|
||||
// required int32 type_id = 2;
|
||||
// required string message = 3;
|
||||
// };
|
||||
// }
|
||||
// That is the MessageSet wire format. We can't use a proto to generate these
|
||||
// because that would introduce a circular dependency between it and this package.
|
||||
//
|
||||
// When a proto1 proto has a field that looks like:
|
||||
// optional message<MessageSet> info = 3;
|
||||
// the protocol compiler produces a field in the generated struct that looks like:
|
||||
// Info *_proto_.MessageSet `protobuf:"bytes,3,opt,name=info"`
|
||||
// The package is automatically inserted so there is no need for that proto file to
|
||||
// import this package.
|
||||
|
||||
type _MessageSet_Item struct {
|
||||
TypeId *int32 `protobuf:"varint,2,req,name=type_id"`
|
||||
Message []byte `protobuf:"bytes,3,req,name=message"`
|
||||
}
|
||||
|
||||
type MessageSet struct {
|
||||
Item []*_MessageSet_Item `protobuf:"group,1,rep"`
|
||||
XXX_unrecognized []byte
|
||||
// TODO: caching?
|
||||
}
|
||||
|
||||
// Make sure MessageSet is a Message.
|
||||
var _ Message = (*MessageSet)(nil)
|
||||
|
||||
// messageTypeIder is an interface satisfied by a protocol buffer type
|
||||
// that may be stored in a MessageSet.
|
||||
type messageTypeIder interface {
|
||||
MessageTypeId() int32
|
||||
}
|
||||
|
||||
func (ms *MessageSet) find(pb Message) *_MessageSet_Item {
|
||||
mti, ok := pb.(messageTypeIder)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
id := mti.MessageTypeId()
|
||||
for _, item := range ms.Item {
|
||||
if *item.TypeId == id {
|
||||
return item
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (ms *MessageSet) Has(pb Message) bool {
|
||||
if ms.find(pb) != nil {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (ms *MessageSet) Unmarshal(pb Message) error {
|
||||
if item := ms.find(pb); item != nil {
|
||||
return Unmarshal(item.Message, pb)
|
||||
}
|
||||
if _, ok := pb.(messageTypeIder); !ok {
|
||||
return ErrNoMessageTypeId
|
||||
}
|
||||
return nil // TODO: return error instead?
|
||||
}
|
||||
|
||||
func (ms *MessageSet) Marshal(pb Message) error {
|
||||
msg, err := Marshal(pb)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if item := ms.find(pb); item != nil {
|
||||
// reuse existing item
|
||||
item.Message = msg
|
||||
return nil
|
||||
}
|
||||
|
||||
mti, ok := pb.(messageTypeIder)
|
||||
if !ok {
|
||||
return ErrNoMessageTypeId
|
||||
}
|
||||
|
||||
mtid := mti.MessageTypeId()
|
||||
ms.Item = append(ms.Item, &_MessageSet_Item{
|
||||
TypeId: &mtid,
|
||||
Message: msg,
|
||||
})
|
||||
return nil
|
||||
}
|
||||
|
||||
func (ms *MessageSet) Reset() { *ms = MessageSet{} }
|
||||
func (ms *MessageSet) String() string { return CompactTextString(ms) }
|
||||
func (*MessageSet) ProtoMessage() {}
|
||||
|
||||
// Support for the message_set_wire_format message option.
|
||||
|
||||
func skipVarint(buf []byte) []byte {
|
||||
i := 0
|
||||
for ; buf[i]&0x80 != 0; i++ {
|
||||
}
|
||||
return buf[i+1:]
|
||||
}
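// For example, skipVarint([]byte{0x96, 0x01, 0x08}) skips the two-byte varint
// 0x96 0x01 (the value 150) and returns []byte{0x08}.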
|
||||
|
||||
// MarshalMessageSet encodes the extension map represented by m in the message set wire format.
|
||||
// It is called by generated Marshal methods on protocol buffer messages with the message_set_wire_format option.
|
||||
func MarshalMessageSet(m map[int32]Extension) ([]byte, error) {
|
||||
if err := encodeExtensionMap(m); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Sort extension IDs to provide a deterministic encoding.
|
||||
// See also enc_map in encode.go.
|
||||
ids := make([]int, 0, len(m))
|
||||
for id := range m {
|
||||
ids = append(ids, int(id))
|
||||
}
|
||||
sort.Ints(ids)
|
||||
|
||||
ms := &MessageSet{Item: make([]*_MessageSet_Item, 0, len(m))}
|
||||
for _, id := range ids {
|
||||
e := m[int32(id)]
|
||||
// Remove the wire type and field number varint, as well as the length varint.
|
||||
msg := skipVarint(skipVarint(e.enc))
|
||||
|
||||
ms.Item = append(ms.Item, &_MessageSet_Item{
|
||||
TypeId: Int32(int32(id)),
|
||||
Message: msg,
|
||||
})
|
||||
}
|
||||
return Marshal(ms)
|
||||
}
|
||||
|
||||
// UnmarshalMessageSet decodes the extension map encoded in buf in the message set wire format.
|
||||
// It is called by generated Unmarshal methods on protocol buffer messages with the message_set_wire_format option.
|
||||
func UnmarshalMessageSet(buf []byte, m map[int32]Extension) error {
|
||||
ms := new(MessageSet)
|
||||
if err := Unmarshal(buf, ms); err != nil {
|
||||
return err
|
||||
}
|
||||
for _, item := range ms.Item {
|
||||
id := *item.TypeId
|
||||
msg := item.Message
|
||||
|
||||
// Restore wire type and field number varint, plus length varint.
|
||||
// Be careful to preserve duplicate items.
|
||||
b := EncodeVarint(uint64(id)<<3 | WireBytes)
|
||||
if ext, ok := m[id]; ok {
|
||||
// Existing data; rip off the tag and length varint
|
||||
// so we join the new data correctly.
|
||||
// We can assume that ext.enc is set because we are unmarshaling.
|
||||
o := ext.enc[len(b):] // skip wire type and field number
|
||||
_, n := DecodeVarint(o) // calculate length of length varint
|
||||
o = o[n:] // skip length varint
|
||||
msg = append(o, msg...) // join old data and new data
|
||||
}
|
||||
b = append(b, EncodeVarint(uint64(len(msg)))...)
|
||||
b = append(b, msg...)
|
||||
|
||||
m[id] = Extension{enc: b}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// MarshalMessageSetJSON encodes the extension map represented by m in JSON format.
|
||||
// It is called by generated MarshalJSON methods on protocol buffer messages with the message_set_wire_format option.
|
||||
func MarshalMessageSetJSON(m map[int32]Extension) ([]byte, error) {
|
||||
var b bytes.Buffer
|
||||
b.WriteByte('{')
|
||||
|
||||
// Process the map in key order for deterministic output.
|
||||
ids := make([]int32, 0, len(m))
|
||||
for id := range m {
|
||||
ids = append(ids, id)
|
||||
}
|
||||
sort.Sort(int32Slice(ids)) // int32Slice defined in text.go
|
||||
|
||||
for i, id := range ids {
|
||||
ext := m[id]
|
||||
if i > 0 {
|
||||
b.WriteByte(',')
|
||||
}
|
||||
|
||||
msd, ok := messageSetMap[id]
|
||||
if !ok {
|
||||
// Unknown type; we can't render it, so skip it.
|
||||
continue
|
||||
}
|
||||
fmt.Fprintf(&b, `"[%s]":`, msd.name)
|
||||
|
||||
x := ext.value
|
||||
if x == nil {
|
||||
x = reflect.New(msd.t.Elem()).Interface()
|
||||
if err := Unmarshal(ext.enc, x.(Message)); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
d, err := json.Marshal(x)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
b.Write(d)
|
||||
}
|
||||
b.WriteByte('}')
|
||||
return b.Bytes(), nil
|
||||
}
|
||||
|
||||
// UnmarshalMessageSetJSON decodes the extension map encoded in buf in JSON format.
|
||||
// It is called by generated UnmarshalJSON methods on protocol buffer messages with the message_set_wire_format option.
|
||||
func UnmarshalMessageSetJSON(buf []byte, m map[int32]Extension) error {
|
||||
// Common-case fast path.
|
||||
if len(buf) == 0 || bytes.Equal(buf, []byte("{}")) {
|
||||
return nil
|
||||
}
|
||||
|
||||
// This is fairly tricky, and it's not clear that it is needed.
|
||||
return errors.New("TODO: UnmarshalMessageSetJSON not yet implemented")
|
||||
}
|
||||
|
||||
// A global registry of types that can be used in a MessageSet.
|
||||
|
||||
var messageSetMap = make(map[int32]messageSetDesc)
|
||||
|
||||
type messageSetDesc struct {
|
||||
t reflect.Type // pointer to struct
|
||||
name string
|
||||
}
|
||||
|
||||
// RegisterMessageSetType is called from the generated code.
|
||||
func RegisterMessageSetType(m Message, fieldNum int32, name string) {
|
||||
messageSetMap[fieldNum] = messageSetDesc{
|
||||
t: reflect.TypeOf(m),
|
||||
name: name,
|
||||
}
|
||||
}
|
||||
479
Godeps/_workspace/src/github.com/golang/protobuf/proto/pointer_reflect.go
generated
vendored
Normal file
@@ -0,0 +1,479 @@
|
||||
// Go support for Protocol Buffers - Google's data interchange format
|
||||
//
|
||||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// https://github.com/golang/protobuf
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
// +build appengine
|
||||
|
||||
// This file contains an implementation of proto field accesses using package reflect.
|
||||
// It is slower than the code in pointer_unsafe.go but it avoids package unsafe and can
|
||||
// be used on App Engine.
|
||||
|
||||
package proto
|
||||
|
||||
import (
|
||||
"math"
|
||||
"reflect"
|
||||
)
|
||||
|
||||
// A structPointer is a pointer to a struct.
|
||||
type structPointer struct {
|
||||
v reflect.Value
|
||||
}
|
||||
|
||||
// toStructPointer returns a structPointer equivalent to the given reflect value.
|
||||
// The reflect value must itself be a pointer to a struct.
|
||||
func toStructPointer(v reflect.Value) structPointer {
|
||||
return structPointer{v}
|
||||
}
|
||||
|
||||
// IsNil reports whether p is nil.
|
||||
func structPointer_IsNil(p structPointer) bool {
|
||||
return p.v.IsNil()
|
||||
}
|
||||
|
||||
// Interface returns the struct pointer as an interface value.
|
||||
func structPointer_Interface(p structPointer, _ reflect.Type) interface{} {
|
||||
return p.v.Interface()
|
||||
}
|
||||
|
||||
// A field identifies a field in a struct, accessible from a structPointer.
|
||||
// In this implementation, a field is identified by the sequence of field indices
|
||||
// passed to reflect's FieldByIndex.
|
||||
type field []int
|
||||
|
||||
// toField returns a field equivalent to the given reflect field.
|
||||
func toField(f *reflect.StructField) field {
|
||||
return f.Index
|
||||
}
|
||||
|
||||
// invalidField is an invalid field identifier.
|
||||
var invalidField = field(nil)
|
||||
|
||||
// IsValid reports whether the field identifier is valid.
|
||||
func (f field) IsValid() bool { return f != nil }
|
||||
|
||||
// field returns the given field in the struct as a reflect value.
|
||||
func structPointer_field(p structPointer, f field) reflect.Value {
|
||||
// Special case: an extension map entry with a value of type T
|
||||
// passes a *T to the struct-handling code with a zero field,
|
||||
// expecting that it will be treated as equivalent to *struct{ X T },
|
||||
// which has the same memory layout. We have to handle that case
|
||||
// specially, because reflect will panic if we call FieldByIndex on a
|
||||
// non-struct.
|
||||
if f == nil {
|
||||
return p.v.Elem()
|
||||
}
|
||||
|
||||
return p.v.Elem().FieldByIndex(f)
|
||||
}
|
||||
|
||||
// ifield returns the given field in the struct as an interface value.
|
||||
func structPointer_ifield(p structPointer, f field) interface{} {
|
||||
return structPointer_field(p, f).Addr().Interface()
|
||||
}
|
||||
|
||||
// Bytes returns the address of a []byte field in the struct.
|
||||
func structPointer_Bytes(p structPointer, f field) *[]byte {
|
||||
return structPointer_ifield(p, f).(*[]byte)
|
||||
}
|
||||
|
||||
// BytesSlice returns the address of a [][]byte field in the struct.
|
||||
func structPointer_BytesSlice(p structPointer, f field) *[][]byte {
|
||||
return structPointer_ifield(p, f).(*[][]byte)
|
||||
}
|
||||
|
||||
// Bool returns the address of a *bool field in the struct.
|
||||
func structPointer_Bool(p structPointer, f field) **bool {
|
||||
return structPointer_ifield(p, f).(**bool)
|
||||
}
|
||||
|
||||
// BoolVal returns the address of a bool field in the struct.
|
||||
func structPointer_BoolVal(p structPointer, f field) *bool {
|
||||
return structPointer_ifield(p, f).(*bool)
|
||||
}
|
||||
|
||||
// BoolSlice returns the address of a []bool field in the struct.
|
||||
func structPointer_BoolSlice(p structPointer, f field) *[]bool {
|
||||
return structPointer_ifield(p, f).(*[]bool)
|
||||
}
|
||||
|
||||
// String returns the address of a *string field in the struct.
|
||||
func structPointer_String(p structPointer, f field) **string {
|
||||
return structPointer_ifield(p, f).(**string)
|
||||
}
|
||||
|
||||
// StringVal returns the address of a string field in the struct.
|
||||
func structPointer_StringVal(p structPointer, f field) *string {
|
||||
return structPointer_ifield(p, f).(*string)
|
||||
}
|
||||
|
||||
// StringSlice returns the address of a []string field in the struct.
|
||||
func structPointer_StringSlice(p structPointer, f field) *[]string {
|
||||
return structPointer_ifield(p, f).(*[]string)
|
||||
}
|
||||
|
||||
// ExtMap returns the address of an extension map field in the struct.
|
||||
func structPointer_ExtMap(p structPointer, f field) *map[int32]Extension {
|
||||
return structPointer_ifield(p, f).(*map[int32]Extension)
|
||||
}
|
||||
|
||||
// NewAt returns the reflect.Value for a pointer to a field in the struct.
|
||||
func structPointer_NewAt(p structPointer, f field, typ reflect.Type) reflect.Value {
|
||||
return structPointer_field(p, f).Addr()
|
||||
}
|
||||
|
||||
// SetStructPointer writes a *struct field in the struct.
|
||||
func structPointer_SetStructPointer(p structPointer, f field, q structPointer) {
|
||||
structPointer_field(p, f).Set(q.v)
|
||||
}
|
||||
|
||||
// GetStructPointer reads a *struct field in the struct.
|
||||
func structPointer_GetStructPointer(p structPointer, f field) structPointer {
|
||||
return structPointer{structPointer_field(p, f)}
|
||||
}
|
||||
|
||||
// StructPointerSlice returns the address of a []*struct field in the struct.
|
||||
func structPointer_StructPointerSlice(p structPointer, f field) structPointerSlice {
|
||||
return structPointerSlice{structPointer_field(p, f)}
|
||||
}
|
||||
|
||||
// A structPointerSlice represents the address of a slice of pointers to structs
|
||||
// (themselves messages or groups). That is, v.Type() is *[]*struct{...}.
|
||||
type structPointerSlice struct {
|
||||
v reflect.Value
|
||||
}
|
||||
|
||||
func (p structPointerSlice) Len() int { return p.v.Len() }
|
||||
func (p structPointerSlice) Index(i int) structPointer { return structPointer{p.v.Index(i)} }
|
||||
func (p structPointerSlice) Append(q structPointer) {
|
||||
p.v.Set(reflect.Append(p.v, q.v))
|
||||
}
|
||||
|
||||
var (
|
||||
int32Type = reflect.TypeOf(int32(0))
|
||||
uint32Type = reflect.TypeOf(uint32(0))
|
||||
float32Type = reflect.TypeOf(float32(0))
|
||||
int64Type = reflect.TypeOf(int64(0))
|
||||
uint64Type = reflect.TypeOf(uint64(0))
|
||||
float64Type = reflect.TypeOf(float64(0))
|
||||
)
|
||||
|
||||
// A word32 represents a field of type *int32, *uint32, *float32, or *enum.
|
||||
// That is, v.Type() is *int32, *uint32, *float32, or *enum and v is assignable.
|
||||
type word32 struct {
|
||||
v reflect.Value
|
||||
}
|
||||
|
||||
// IsNil reports whether p is nil.
|
||||
func word32_IsNil(p word32) bool {
|
||||
return p.v.IsNil()
|
||||
}
|
||||
|
||||
// Set sets p to point at a newly allocated word with bits set to x.
|
||||
func word32_Set(p word32, o *Buffer, x uint32) {
|
||||
t := p.v.Type().Elem()
|
||||
switch t {
|
||||
case int32Type:
|
||||
if len(o.int32s) == 0 {
|
||||
o.int32s = make([]int32, uint32PoolSize)
|
||||
}
|
||||
o.int32s[0] = int32(x)
|
||||
p.v.Set(reflect.ValueOf(&o.int32s[0]))
|
||||
o.int32s = o.int32s[1:]
|
||||
return
|
||||
case uint32Type:
|
||||
if len(o.uint32s) == 0 {
|
||||
o.uint32s = make([]uint32, uint32PoolSize)
|
||||
}
|
||||
o.uint32s[0] = x
|
||||
p.v.Set(reflect.ValueOf(&o.uint32s[0]))
|
||||
o.uint32s = o.uint32s[1:]
|
||||
return
|
||||
case float32Type:
|
||||
if len(o.float32s) == 0 {
|
||||
o.float32s = make([]float32, uint32PoolSize)
|
||||
}
|
||||
o.float32s[0] = math.Float32frombits(x)
|
||||
p.v.Set(reflect.ValueOf(&o.float32s[0]))
|
||||
o.float32s = o.float32s[1:]
|
||||
return
|
||||
}
|
||||
|
||||
// must be enum
|
||||
p.v.Set(reflect.New(t))
|
||||
p.v.Elem().SetInt(int64(int32(x)))
|
||||
}
|
||||
|
||||
// Get gets the bits pointed at by p, as a uint32.
|
||||
func word32_Get(p word32) uint32 {
|
||||
elem := p.v.Elem()
|
||||
switch elem.Kind() {
|
||||
case reflect.Int32:
|
||||
return uint32(elem.Int())
|
||||
case reflect.Uint32:
|
||||
return uint32(elem.Uint())
|
||||
case reflect.Float32:
|
||||
return math.Float32bits(float32(elem.Float()))
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// Word32 returns a reference to a *int32, *uint32, *float32, or *enum field in the struct.
|
||||
func structPointer_Word32(p structPointer, f field) word32 {
|
||||
return word32{structPointer_field(p, f)}
|
||||
}
|
||||
|
||||
// A word32Val represents a field of type int32, uint32, float32, or enum.
|
||||
// That is, v.Type() is int32, uint32, float32, or enum and v is assignable.
|
||||
type word32Val struct {
|
||||
v reflect.Value
|
||||
}
|
||||
|
||||
// Set sets *p to x.
|
||||
func word32Val_Set(p word32Val, x uint32) {
|
||||
switch p.v.Type() {
|
||||
case int32Type:
|
||||
p.v.SetInt(int64(x))
|
||||
return
|
||||
case uint32Type:
|
||||
p.v.SetUint(uint64(x))
|
||||
return
|
||||
case float32Type:
|
||||
p.v.SetFloat(float64(math.Float32frombits(x)))
|
||||
return
|
||||
}
|
||||
|
||||
// must be enum
|
||||
p.v.SetInt(int64(int32(x)))
|
||||
}
|
||||
|
||||
// Get gets the bits pointed at by p, as a uint32.
|
||||
func word32Val_Get(p word32Val) uint32 {
|
||||
elem := p.v
|
||||
switch elem.Kind() {
|
||||
case reflect.Int32:
|
||||
return uint32(elem.Int())
|
||||
case reflect.Uint32:
|
||||
return uint32(elem.Uint())
|
||||
case reflect.Float32:
|
||||
return math.Float32bits(float32(elem.Float()))
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// Word32Val returns a reference to a int32, uint32, float32, or enum field in the struct.
|
||||
func structPointer_Word32Val(p structPointer, f field) word32Val {
|
||||
return word32Val{structPointer_field(p, f)}
|
||||
}
|
||||
|
||||
// A word32Slice is a slice of 32-bit values.
|
||||
// That is, v.Type() is []int32, []uint32, []float32, or []enum.
|
||||
type word32Slice struct {
|
||||
v reflect.Value
|
||||
}
|
||||
|
||||
func (p word32Slice) Append(x uint32) {
|
||||
n, m := p.v.Len(), p.v.Cap()
|
||||
if n < m {
|
||||
p.v.SetLen(n + 1)
|
||||
} else {
|
||||
t := p.v.Type().Elem()
|
||||
p.v.Set(reflect.Append(p.v, reflect.Zero(t)))
|
||||
}
|
||||
elem := p.v.Index(n)
|
||||
switch elem.Kind() {
|
||||
case reflect.Int32:
|
||||
elem.SetInt(int64(int32(x)))
|
||||
case reflect.Uint32:
|
||||
elem.SetUint(uint64(x))
|
||||
case reflect.Float32:
|
||||
elem.SetFloat(float64(math.Float32frombits(x)))
|
||||
}
|
||||
}
|
||||
|
||||
func (p word32Slice) Len() int {
|
||||
return p.v.Len()
|
||||
}
|
||||
|
||||
func (p word32Slice) Index(i int) uint32 {
|
||||
elem := p.v.Index(i)
|
||||
switch elem.Kind() {
|
||||
case reflect.Int32:
|
||||
return uint32(elem.Int())
|
||||
case reflect.Uint32:
|
||||
return uint32(elem.Uint())
|
||||
case reflect.Float32:
|
||||
return math.Float32bits(float32(elem.Float()))
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// Word32Slice returns a reference to a []int32, []uint32, []float32, or []enum field in the struct.
|
||||
func structPointer_Word32Slice(p structPointer, f field) word32Slice {
|
||||
return word32Slice{structPointer_field(p, f)}
|
||||
}
|
||||
|
||||
// word64 is like word32 but for 64-bit values.
|
||||
type word64 struct {
|
||||
v reflect.Value
|
||||
}
|
||||
|
||||
func word64_Set(p word64, o *Buffer, x uint64) {
|
||||
t := p.v.Type().Elem()
|
||||
switch t {
|
||||
case int64Type:
|
||||
if len(o.int64s) == 0 {
|
||||
o.int64s = make([]int64, uint64PoolSize)
|
||||
}
|
||||
o.int64s[0] = int64(x)
|
||||
p.v.Set(reflect.ValueOf(&o.int64s[0]))
|
||||
o.int64s = o.int64s[1:]
|
||||
return
|
||||
case uint64Type:
|
||||
if len(o.uint64s) == 0 {
|
||||
o.uint64s = make([]uint64, uint64PoolSize)
|
||||
}
|
||||
o.uint64s[0] = x
|
||||
p.v.Set(reflect.ValueOf(&o.uint64s[0]))
|
||||
o.uint64s = o.uint64s[1:]
|
||||
return
|
||||
case float64Type:
|
||||
if len(o.float64s) == 0 {
|
||||
o.float64s = make([]float64, uint64PoolSize)
|
||||
}
|
||||
o.float64s[0] = math.Float64frombits(x)
|
||||
p.v.Set(reflect.ValueOf(&o.float64s[0]))
|
||||
o.float64s = o.float64s[1:]
|
||||
return
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
func word64_IsNil(p word64) bool {
|
||||
return p.v.IsNil()
|
||||
}
|
||||
|
||||
func word64_Get(p word64) uint64 {
|
||||
elem := p.v.Elem()
|
||||
switch elem.Kind() {
|
||||
case reflect.Int64:
|
||||
return uint64(elem.Int())
|
||||
case reflect.Uint64:
|
||||
return elem.Uint()
|
||||
case reflect.Float64:
|
||||
return math.Float64bits(elem.Float())
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
func structPointer_Word64(p structPointer, f field) word64 {
|
||||
return word64{structPointer_field(p, f)}
|
||||
}
|
||||
|
||||
// word64Val is like word32Val but for 64-bit values.
|
||||
type word64Val struct {
|
||||
v reflect.Value
|
||||
}
|
||||
|
||||
func word64Val_Set(p word64Val, o *Buffer, x uint64) {
|
||||
switch p.v.Type() {
|
||||
case int64Type:
|
||||
p.v.SetInt(int64(x))
|
||||
return
|
||||
case uint64Type:
|
||||
p.v.SetUint(x)
|
||||
return
|
||||
case float64Type:
|
||||
p.v.SetFloat(math.Float64frombits(x))
|
||||
return
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
func word64Val_Get(p word64Val) uint64 {
|
||||
elem := p.v
|
||||
switch elem.Kind() {
|
||||
case reflect.Int64:
|
||||
return uint64(elem.Int())
|
||||
case reflect.Uint64:
|
||||
return elem.Uint()
|
||||
case reflect.Float64:
|
||||
return math.Float64bits(elem.Float())
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
func structPointer_Word64Val(p structPointer, f field) word64Val {
|
||||
return word64Val{structPointer_field(p, f)}
|
||||
}
|
||||
|
||||
type word64Slice struct {
|
||||
v reflect.Value
|
||||
}
|
||||
|
||||
func (p word64Slice) Append(x uint64) {
|
||||
n, m := p.v.Len(), p.v.Cap()
|
||||
if n < m {
|
||||
p.v.SetLen(n + 1)
|
||||
} else {
|
||||
t := p.v.Type().Elem()
|
||||
p.v.Set(reflect.Append(p.v, reflect.Zero(t)))
|
||||
}
|
||||
elem := p.v.Index(n)
|
||||
switch elem.Kind() {
|
||||
case reflect.Int64:
|
||||
elem.SetInt(int64(x))
|
||||
case reflect.Uint64:
|
||||
elem.SetUint(uint64(x))
|
||||
case reflect.Float64:
|
||||
elem.SetFloat(float64(math.Float64frombits(x)))
|
||||
}
|
||||
}
|
||||
|
||||
func (p word64Slice) Len() int {
|
||||
return p.v.Len()
|
||||
}
|
||||
|
||||
func (p word64Slice) Index(i int) uint64 {
|
||||
elem := p.v.Index(i)
|
||||
switch elem.Kind() {
|
||||
case reflect.Int64:
|
||||
return uint64(elem.Int())
|
||||
case reflect.Uint64:
|
||||
return uint64(elem.Uint())
|
||||
case reflect.Float64:
|
||||
return math.Float64bits(elem.Float())
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
func structPointer_Word64Slice(p structPointer, f field) word64Slice {
|
||||
return word64Slice{structPointer_field(p, f)}
|
||||
}
|
||||
266
Godeps/_workspace/src/github.com/golang/protobuf/proto/pointer_unsafe.go
generated
vendored
Normal file
@@ -0,0 +1,266 @@
|
||||
// Go support for Protocol Buffers - Google's data interchange format
|
||||
//
|
||||
// Copyright 2012 The Go Authors. All rights reserved.
|
||||
// https://github.com/golang/protobuf
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
// +build !appengine
|
||||
|
||||
// This file contains the implementation of the proto field accesses using package unsafe.
|
||||
|
||||
package proto
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
// NOTE: These type_Foo functions would more idiomatically be methods,
|
||||
// but Go does not allow methods on pointer types, and we must preserve
|
||||
// some pointer type for the garbage collector. We use these
|
||||
// funcs with clunky names as our poor approximation to methods.
|
||||
//
|
||||
// An alternative would be
|
||||
// type structPointer struct { p unsafe.Pointer }
|
||||
// but that does not registerize as well.
|
||||
|
||||
// A structPointer is a pointer to a struct.
|
||||
type structPointer unsafe.Pointer
|
||||
|
||||
// toStructPointer returns a structPointer equivalent to the given reflect value.
|
||||
func toStructPointer(v reflect.Value) structPointer {
|
||||
return structPointer(unsafe.Pointer(v.Pointer()))
|
||||
}
|
||||
|
||||
// IsNil reports whether p is nil.
|
||||
func structPointer_IsNil(p structPointer) bool {
|
||||
return p == nil
|
||||
}
|
||||
|
||||
// Interface returns the struct pointer, assumed to have element type t,
|
||||
// as an interface value.
|
||||
func structPointer_Interface(p structPointer, t reflect.Type) interface{} {
|
||||
return reflect.NewAt(t, unsafe.Pointer(p)).Interface()
|
||||
}
|
||||
|
||||
// A field identifies a field in a struct, accessible from a structPointer.
|
||||
// In this implementation, a field is identified by its byte offset from the start of the struct.
|
||||
type field uintptr
|
||||
|
||||
// toField returns a field equivalent to the given reflect field.
|
||||
func toField(f *reflect.StructField) field {
|
||||
return field(f.Offset)
|
||||
}
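// For example, if a struct's []byte field begins at byte offset 16, its field
// identifier is field(16), and the accessors below simply add that offset to
// the struct's base address to reach it.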
|
||||
|
||||
// invalidField is an invalid field identifier.
|
||||
const invalidField = ^field(0)
|
||||
|
||||
// IsValid reports whether the field identifier is valid.
|
||||
func (f field) IsValid() bool {
|
||||
return f != ^field(0)
|
||||
}
|
||||
|
||||
// Bytes returns the address of a []byte field in the struct.
|
||||
func structPointer_Bytes(p structPointer, f field) *[]byte {
|
||||
return (*[]byte)(unsafe.Pointer(uintptr(p) + uintptr(f)))
|
||||
}
|
||||
|
||||
// BytesSlice returns the address of a [][]byte field in the struct.
|
||||
func structPointer_BytesSlice(p structPointer, f field) *[][]byte {
|
||||
return (*[][]byte)(unsafe.Pointer(uintptr(p) + uintptr(f)))
|
||||
}
|
||||
|
||||
// Bool returns the address of a *bool field in the struct.
|
||||
func structPointer_Bool(p structPointer, f field) **bool {
|
||||
return (**bool)(unsafe.Pointer(uintptr(p) + uintptr(f)))
|
||||
}
|
||||
|
||||
// BoolVal returns the address of a bool field in the struct.
|
||||
func structPointer_BoolVal(p structPointer, f field) *bool {
|
||||
return (*bool)(unsafe.Pointer(uintptr(p) + uintptr(f)))
|
||||
}
|
||||
|
||||
// BoolSlice returns the address of a []bool field in the struct.
|
||||
func structPointer_BoolSlice(p structPointer, f field) *[]bool {
|
||||
return (*[]bool)(unsafe.Pointer(uintptr(p) + uintptr(f)))
|
||||
}
|
||||
|
||||
// String returns the address of a *string field in the struct.
|
||||
func structPointer_String(p structPointer, f field) **string {
|
||||
return (**string)(unsafe.Pointer(uintptr(p) + uintptr(f)))
|
||||
}
|
||||
|
||||
// StringVal returns the address of a string field in the struct.
|
||||
func structPointer_StringVal(p structPointer, f field) *string {
|
||||
return (*string)(unsafe.Pointer(uintptr(p) + uintptr(f)))
|
||||
}
|
||||
|
||||
// StringSlice returns the address of a []string field in the struct.
|
||||
func structPointer_StringSlice(p structPointer, f field) *[]string {
|
||||
return (*[]string)(unsafe.Pointer(uintptr(p) + uintptr(f)))
|
||||
}
|
||||
|
||||
// ExtMap returns the address of an extension map field in the struct.
|
||||
func structPointer_ExtMap(p structPointer, f field) *map[int32]Extension {
|
||||
return (*map[int32]Extension)(unsafe.Pointer(uintptr(p) + uintptr(f)))
|
||||
}
|
||||
|
||||
// NewAt returns the reflect.Value for a pointer to a field in the struct.
|
||||
func structPointer_NewAt(p structPointer, f field, typ reflect.Type) reflect.Value {
|
||||
return reflect.NewAt(typ, unsafe.Pointer(uintptr(p)+uintptr(f)))
|
||||
}
|
||||
|
||||
// SetStructPointer writes a *struct field in the struct.
|
||||
func structPointer_SetStructPointer(p structPointer, f field, q structPointer) {
|
||||
*(*structPointer)(unsafe.Pointer(uintptr(p) + uintptr(f))) = q
|
||||
}
|
||||
|
||||
// GetStructPointer reads a *struct field in the struct.
|
||||
func structPointer_GetStructPointer(p structPointer, f field) structPointer {
|
||||
return *(*structPointer)(unsafe.Pointer(uintptr(p) + uintptr(f)))
|
||||
}
|
||||
|
||||
// StructPointerSlice returns the address of a []*struct field in the struct.
|
||||
func structPointer_StructPointerSlice(p structPointer, f field) *structPointerSlice {
|
||||
return (*structPointerSlice)(unsafe.Pointer(uintptr(p) + uintptr(f)))
|
||||
}
|
||||
|
||||
// A structPointerSlice represents a slice of pointers to structs (themselves submessages or groups).
|
||||
type structPointerSlice []structPointer
|
||||
|
||||
func (v *structPointerSlice) Len() int { return len(*v) }
|
||||
func (v *structPointerSlice) Index(i int) structPointer { return (*v)[i] }
|
||||
func (v *structPointerSlice) Append(p structPointer) { *v = append(*v, p) }
|
||||
|
||||
// A word32 is the address of a "pointer to 32-bit value" field.
|
||||
type word32 **uint32
|
||||
|
||||
// IsNil reports whether *v is nil.
|
||||
func word32_IsNil(p word32) bool {
|
||||
return *p == nil
|
||||
}
|
||||
|
||||
// Set sets *v to point at a newly allocated word set to x.
|
||||
func word32_Set(p word32, o *Buffer, x uint32) {
|
||||
if len(o.uint32s) == 0 {
|
||||
o.uint32s = make([]uint32, uint32PoolSize)
|
||||
}
|
||||
o.uint32s[0] = x
|
||||
*p = &o.uint32s[0]
|
||||
o.uint32s = o.uint32s[1:]
|
||||
}
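
// exampleWord32Set is an illustrative sketch, not part of the upstream
// library: Set services many optional *uint32 fields from one pooled
// allocation (o.uint32s) instead of allocating each word separately.
func exampleWord32Set(o *Buffer) uint32 {
	var n *uint32
	p := word32(&n)      // address of a "*uint32" field
	word32_Set(p, o, 42) // n now points into o's pooled slice
	return word32_Get(p) // 42
}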
|
||||
|
||||
// Get gets the value pointed at by *p.
|
||||
func word32_Get(p word32) uint32 {
|
||||
return **p
|
||||
}
|
||||
|
||||
// Word32 returns the address of a *int32, *uint32, *float32, or *enum field in the struct.
|
||||
func structPointer_Word32(p structPointer, f field) word32 {
|
||||
return word32((**uint32)(unsafe.Pointer(uintptr(p) + uintptr(f))))
|
||||
}
|
||||
|
||||
// A word32Val is the address of a 32-bit value field.
|
||||
type word32Val *uint32
|
||||
|
||||
// Set sets *p to x.
|
||||
func word32Val_Set(p word32Val, x uint32) {
|
||||
*p = x
|
||||
}
|
||||
|
||||
// Get gets the value pointed at by p.
|
||||
func word32Val_Get(p word32Val) uint32 {
|
||||
return *p
|
||||
}
|
||||
|
||||
// Word32Val returns the address of a *int32, *uint32, *float32, or *enum field in the struct.
|
||||
func structPointer_Word32Val(p structPointer, f field) word32Val {
|
||||
return word32Val((*uint32)(unsafe.Pointer(uintptr(p) + uintptr(f))))
|
||||
}
|
||||
|
||||
// A word32Slice is a slice of 32-bit values.
|
||||
type word32Slice []uint32
|
||||
|
||||
func (v *word32Slice) Append(x uint32) { *v = append(*v, x) }
|
||||
func (v *word32Slice) Len() int { return len(*v) }
|
||||
func (v *word32Slice) Index(i int) uint32 { return (*v)[i] }
|
||||
|
||||
// Word32Slice returns the address of a []int32, []uint32, []float32, or []enum field in the struct.
|
||||
func structPointer_Word32Slice(p structPointer, f field) *word32Slice {
|
||||
return (*word32Slice)(unsafe.Pointer(uintptr(p) + uintptr(f)))
|
||||
}
|
||||
|
||||
// word64 is like word32 but for 64-bit values.
|
||||
type word64 **uint64
|
||||
|
||||
func word64_Set(p word64, o *Buffer, x uint64) {
|
||||
if len(o.uint64s) == 0 {
|
||||
o.uint64s = make([]uint64, uint64PoolSize)
|
||||
}
|
||||
o.uint64s[0] = x
|
||||
*p = &o.uint64s[0]
|
||||
o.uint64s = o.uint64s[1:]
|
||||
}
|
||||
|
||||
func word64_IsNil(p word64) bool {
|
||||
return *p == nil
|
||||
}
|
||||
|
||||
func word64_Get(p word64) uint64 {
|
||||
return **p
|
||||
}
|
||||
|
||||
func structPointer_Word64(p structPointer, f field) word64 {
|
||||
return word64((**uint64)(unsafe.Pointer(uintptr(p) + uintptr(f))))
|
||||
}
|
||||
|
||||
// word64Val is like word32Val but for 64-bit values.
|
||||
type word64Val *uint64
|
||||
|
||||
func word64Val_Set(p word64Val, o *Buffer, x uint64) {
|
||||
*p = x
|
||||
}
|
||||
|
||||
func word64Val_Get(p word64Val) uint64 {
|
||||
return *p
|
||||
}
|
||||
|
||||
func structPointer_Word64Val(p structPointer, f field) word64Val {
|
||||
return word64Val((*uint64)(unsafe.Pointer(uintptr(p) + uintptr(f))))
|
||||
}
|
||||
|
||||
// word64Slice is like word32Slice but for 64-bit values.
|
||||
type word64Slice []uint64
|
||||
|
||||
func (v *word64Slice) Append(x uint64) { *v = append(*v, x) }
|
||||
func (v *word64Slice) Len() int { return len(*v) }
|
||||
func (v *word64Slice) Index(i int) uint64 { return (*v)[i] }
|
||||
|
||||
func structPointer_Word64Slice(p structPointer, f field) *word64Slice {
|
||||
return (*word64Slice)(unsafe.Pointer(uintptr(p) + uintptr(f)))
|
||||
}
|
||||
811
Godeps/_workspace/src/github.com/golang/protobuf/proto/properties.go
generated
vendored
Normal file
@@ -0,0 +1,811 @@
|
||||
// Go support for Protocol Buffers - Google's data interchange format
|
||||
//
|
||||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// https://github.com/golang/protobuf
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
package proto
|
||||
|
||||
/*
|
||||
* Routines for encoding data into the wire format for protocol buffers.
|
||||
*/
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
)
|
||||
|
||||
const debug bool = false
|
||||
|
||||
// Constants that identify the encoding of a value on the wire.
|
||||
const (
|
||||
WireVarint = 0
|
||||
WireFixed64 = 1
|
||||
WireBytes = 2
|
||||
WireStartGroup = 3
|
||||
WireEndGroup = 4
|
||||
WireFixed32 = 5
|
||||
)
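
// wireName is an illustrative helper, not part of the upstream library,
// mapping the constants above to the names used in the encoding spec.
func wireName(w int) string {
	switch w {
	case WireVarint:
		return "varint"
	case WireFixed64:
		return "64-bit"
	case WireBytes:
		return "length-delimited"
	case WireStartGroup:
		return "start group"
	case WireEndGroup:
		return "end group"
	case WireFixed32:
		return "32-bit"
	}
	return "unknown"
}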
|
||||
|
||||
const startSize = 10 // initial slice/string sizes
|
||||
|
||||
// Encoders are defined in encode.go
|
||||
// An encoder outputs the full representation of a field, including its
|
||||
// tag and encoder type.
|
||||
type encoder func(p *Buffer, prop *Properties, base structPointer) error
|
||||
|
||||
// A valueEncoder encodes a single integer in a particular encoding.
|
||||
type valueEncoder func(o *Buffer, x uint64) error
|
||||
|
||||
// Sizers are defined in encode.go
|
||||
// A sizer returns the encoded size of a field, including its tag and encoder
|
||||
// type.
|
||||
type sizer func(prop *Properties, base structPointer) int
|
||||
|
||||
// A valueSizer returns the encoded size of a single integer in a particular
|
||||
// encoding.
|
||||
type valueSizer func(x uint64) int
|
||||
|
||||
// Decoders are defined in decode.go
|
||||
// A decoder creates a value from its wire representation.
|
||||
// Unrecognized subelements are saved in unrec.
|
||||
type decoder func(p *Buffer, prop *Properties, base structPointer) error
|
||||
|
||||
// A valueDecoder decodes a single integer in a particular encoding.
|
||||
type valueDecoder func(o *Buffer) (x uint64, err error)
|
||||
|
||||
// A oneofMarshaler does the marshaling for all oneof fields in a message.
|
||||
type oneofMarshaler func(Message, *Buffer) error
|
||||
|
||||
// A oneofUnmarshaler does the unmarshaling for a oneof field in a message.
|
||||
type oneofUnmarshaler func(Message, int, int, *Buffer) (bool, error)
|
||||
|
||||
// tagMap is an optimization over map[int]int for typical protocol buffer
|
||||
// use-cases. Encoded protocol buffers are often in tag order with small tag
|
||||
// numbers.
|
||||
type tagMap struct {
|
||||
fastTags []int
|
||||
slowTags map[int]int
|
||||
}
|
||||
|
||||
// tagMapFastLimit is the upper bound on the tag number that will be stored in
|
||||
// the tagMap slice rather than its map.
|
||||
const tagMapFastLimit = 1024
|
||||
|
||||
func (p *tagMap) get(t int) (int, bool) {
|
||||
if t > 0 && t < tagMapFastLimit {
|
||||
if t >= len(p.fastTags) {
|
||||
return 0, false
|
||||
}
|
||||
fi := p.fastTags[t]
|
||||
return fi, fi >= 0
|
||||
}
|
||||
fi, ok := p.slowTags[t]
|
||||
return fi, ok
|
||||
}
|
||||
|
||||
func (p *tagMap) put(t int, fi int) {
|
||||
if t > 0 && t < tagMapFastLimit {
|
||||
for len(p.fastTags) < t+1 {
|
||||
p.fastTags = append(p.fastTags, -1)
|
||||
}
|
||||
p.fastTags[t] = fi
|
||||
return
|
||||
}
|
||||
if p.slowTags == nil {
|
||||
p.slowTags = make(map[int]int)
|
||||
}
|
||||
p.slowTags[t] = fi
|
||||
}
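
// exampleTagMap is an illustrative sketch, not part of the upstream library:
// tags below tagMapFastLimit are stored by index in the fastTags slice, and
// larger tags fall back to the slowTags map.
func exampleTagMap() (int, int) {
	var m tagMap
	m.put(3, 0)    // small tag: indexes fastTags
	m.put(5000, 1) // large tag: goes to slowTags
	a, _ := m.get(3)
	b, _ := m.get(5000)
	return a, b // 0, 1
}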
|
||||
|
||||
// StructProperties represents properties for all the fields of a struct.
|
||||
// decoderTags and decoderOrigNames should only be used by the decoder.
|
||||
type StructProperties struct {
|
||||
Prop []*Properties // properties for each field
|
||||
reqCount int // required count
|
||||
decoderTags tagMap // map from proto tag to struct field number
|
||||
decoderOrigNames map[string]int // map from original name to struct field number
|
||||
order []int // list of struct field numbers in tag order
|
||||
unrecField field // field id of the XXX_unrecognized []byte field
|
||||
extendable bool // is this an extendable proto
|
||||
|
||||
oneofMarshaler oneofMarshaler
|
||||
oneofUnmarshaler oneofUnmarshaler
|
||||
stype reflect.Type
|
||||
|
||||
// OneofTypes contains information about the oneof fields in this message.
|
||||
// It is keyed by the original name of a field.
|
||||
OneofTypes map[string]*OneofProperties
|
||||
}
|
||||
|
||||
// OneofProperties represents information about a specific field in a oneof.
|
||||
type OneofProperties struct {
|
||||
Type reflect.Type // pointer to generated struct type for this oneof field
|
||||
Field int // struct field number of the containing oneof in the message
|
||||
Prop *Properties
|
||||
}
|
||||
|
||||
// Implement the sorting interface so we can sort the fields in tag order, as recommended by the spec.
|
||||
// See encode.go, (*Buffer).enc_struct.
|
||||
|
||||
func (sp *StructProperties) Len() int { return len(sp.order) }
|
||||
func (sp *StructProperties) Less(i, j int) bool {
|
||||
return sp.Prop[sp.order[i]].Tag < sp.Prop[sp.order[j]].Tag
|
||||
}
|
||||
func (sp *StructProperties) Swap(i, j int) { sp.order[i], sp.order[j] = sp.order[j], sp.order[i] }
|
||||
|
||||
// Properties represents the protocol-specific behavior of a single struct field.
|
||||
type Properties struct {
|
||||
Name string // name of the field, for error messages
|
||||
OrigName string // original name before protocol compiler (always set)
|
||||
Wire string
|
||||
WireType int
|
||||
Tag int
|
||||
Required bool
|
||||
Optional bool
|
||||
Repeated bool
|
||||
Packed bool // relevant for repeated primitives only
|
||||
Enum string // set for enum types only
|
||||
proto3 bool // whether this is known to be a proto3 field; set for []byte only
|
||||
oneof bool // whether this is a oneof field
|
||||
|
||||
Default string // default value
|
||||
HasDefault bool // whether an explicit default was provided
|
||||
def_uint64 uint64
|
||||
|
||||
enc encoder
|
||||
valEnc valueEncoder // set for bool and numeric types only
|
||||
field field
|
||||
tagcode []byte // encoding of EncodeVarint((Tag<<3)|WireType)
|
||||
tagbuf [8]byte
|
||||
stype reflect.Type // set for struct types only
|
||||
sprop *StructProperties // set for struct types only
|
||||
isMarshaler bool
|
||||
isUnmarshaler bool
|
||||
|
||||
mtype reflect.Type // set for map types only
|
||||
mkeyprop *Properties // set for map types only
|
||||
mvalprop *Properties // set for map types only
|
||||
|
||||
size sizer
|
||||
valSize valueSizer // set for bool and numeric types only
|
||||
|
||||
dec decoder
|
||||
valDec valueDecoder // set for bool and numeric types only
|
||||
|
||||
// If this is a packable field, this will be the decoder for the packed version of the field.
|
||||
packedDec decoder
|
||||
}
|
||||
|
||||
// String formats the properties in the protobuf struct field tag style.
|
||||
func (p *Properties) String() string {
|
||||
s := p.Wire
|
||||
s = ","
|
||||
s += strconv.Itoa(p.Tag)
|
||||
if p.Required {
|
||||
s += ",req"
|
||||
}
|
||||
if p.Optional {
|
||||
s += ",opt"
|
||||
}
|
||||
if p.Repeated {
|
||||
s += ",rep"
|
||||
}
|
||||
if p.Packed {
|
||||
s += ",packed"
|
||||
}
|
||||
if p.OrigName != p.Name {
|
||||
s += ",name=" + p.OrigName
|
||||
}
|
||||
if p.proto3 {
|
||||
s += ",proto3"
|
||||
}
|
||||
if p.oneof {
|
||||
s += ",oneof"
|
||||
}
|
||||
if len(p.Enum) > 0 {
|
||||
s += ",enum=" + p.Enum
|
||||
}
|
||||
if p.HasDefault {
|
||||
s += ",def=" + p.Default
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
// Parse populates p by parsing a string in the protobuf struct field tag style.
|
||||
func (p *Properties) Parse(s string) {
|
||||
// "bytes,49,opt,name=foo,def=hello!"
|
||||
fields := strings.Split(s, ",") // breaks def=, but handled below.
|
||||
if len(fields) < 2 {
|
||||
fmt.Fprintf(os.Stderr, "proto: tag has too few fields: %q\n", s)
|
||||
return
|
||||
}
|
||||
|
||||
p.Wire = fields[0]
|
||||
switch p.Wire {
|
||||
case "varint":
|
||||
p.WireType = WireVarint
|
||||
p.valEnc = (*Buffer).EncodeVarint
|
||||
p.valDec = (*Buffer).DecodeVarint
|
||||
p.valSize = sizeVarint
|
||||
case "fixed32":
|
||||
p.WireType = WireFixed32
|
||||
p.valEnc = (*Buffer).EncodeFixed32
|
||||
p.valDec = (*Buffer).DecodeFixed32
|
||||
p.valSize = sizeFixed32
|
||||
case "fixed64":
|
||||
p.WireType = WireFixed64
|
||||
p.valEnc = (*Buffer).EncodeFixed64
|
||||
p.valDec = (*Buffer).DecodeFixed64
|
||||
p.valSize = sizeFixed64
|
||||
case "zigzag32":
|
||||
p.WireType = WireVarint
|
||||
p.valEnc = (*Buffer).EncodeZigzag32
|
||||
p.valDec = (*Buffer).DecodeZigzag32
|
||||
p.valSize = sizeZigzag32
|
||||
case "zigzag64":
|
||||
p.WireType = WireVarint
|
||||
p.valEnc = (*Buffer).EncodeZigzag64
|
||||
p.valDec = (*Buffer).DecodeZigzag64
|
||||
p.valSize = sizeZigzag64
|
||||
case "bytes", "group":
|
||||
p.WireType = WireBytes
|
||||
// no numeric converter for non-numeric types
|
||||
default:
|
||||
fmt.Fprintf(os.Stderr, "proto: tag has unknown wire type: %q\n", s)
|
||||
return
|
||||
}
|
||||
|
||||
var err error
|
||||
p.Tag, err = strconv.Atoi(fields[1])
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
for i := 2; i < len(fields); i++ {
|
||||
f := fields[i]
|
||||
switch {
|
||||
case f == "req":
|
||||
p.Required = true
|
||||
case f == "opt":
|
||||
p.Optional = true
|
||||
case f == "rep":
|
||||
p.Repeated = true
|
||||
case f == "packed":
|
||||
p.Packed = true
|
||||
case strings.HasPrefix(f, "name="):
|
||||
p.OrigName = f[5:]
|
||||
case strings.HasPrefix(f, "enum="):
|
||||
p.Enum = f[5:]
|
||||
case f == "proto3":
|
||||
p.proto3 = true
|
||||
case f == "oneof":
|
||||
p.oneof = true
|
||||
case strings.HasPrefix(f, "def="):
|
||||
p.HasDefault = true
|
||||
p.Default = f[4:] // rest of string
|
||||
if i+1 < len(fields) {
|
||||
// Commas aren't escaped, and def is always last.
|
||||
p.Default += "," + strings.Join(fields[i+1:], ",")
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
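
// exampleParseTag is an illustrative sketch, not part of the upstream
// library, showing how a generated struct tag decodes into a Properties.
func exampleParseTag() *Properties {
	p := new(Properties)
	p.Parse("bytes,49,opt,name=foo,def=hello!")
	// p.Wire == "bytes", p.Tag == 49, p.Optional == true,
	// p.OrigName == "foo", p.HasDefault == true, p.Default == "hello!"
	return p
}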
|
||||
|
||||
func logNoSliceEnc(t1, t2 reflect.Type) {
|
||||
fmt.Fprintf(os.Stderr, "proto: no slice oenc for %T = []%T\n", t1, t2)
|
||||
}
|
||||
|
||||
var protoMessageType = reflect.TypeOf((*Message)(nil)).Elem()
|
||||
|
||||
// Initialize the fields for encoding and decoding.
|
||||
func (p *Properties) setEncAndDec(typ reflect.Type, f *reflect.StructField, lockGetProp bool) {
|
||||
p.enc = nil
|
||||
p.dec = nil
|
||||
p.size = nil
|
||||
|
||||
switch t1 := typ; t1.Kind() {
|
||||
default:
|
||||
fmt.Fprintf(os.Stderr, "proto: no coders for %v\n", t1)
|
||||
|
||||
// proto3 scalar types
|
||||
|
||||
case reflect.Bool:
|
||||
p.enc = (*Buffer).enc_proto3_bool
|
||||
p.dec = (*Buffer).dec_proto3_bool
|
||||
p.size = size_proto3_bool
|
||||
case reflect.Int32:
|
||||
p.enc = (*Buffer).enc_proto3_int32
|
||||
p.dec = (*Buffer).dec_proto3_int32
|
||||
p.size = size_proto3_int32
|
||||
case reflect.Uint32:
|
||||
p.enc = (*Buffer).enc_proto3_uint32
|
||||
p.dec = (*Buffer).dec_proto3_int32 // can reuse
|
||||
p.size = size_proto3_uint32
|
||||
case reflect.Int64, reflect.Uint64:
|
||||
p.enc = (*Buffer).enc_proto3_int64
|
||||
p.dec = (*Buffer).dec_proto3_int64
|
||||
p.size = size_proto3_int64
|
||||
case reflect.Float32:
|
||||
p.enc = (*Buffer).enc_proto3_uint32 // can just treat them as bits
|
||||
p.dec = (*Buffer).dec_proto3_int32
|
||||
p.size = size_proto3_uint32
|
||||
case reflect.Float64:
|
||||
p.enc = (*Buffer).enc_proto3_int64 // can just treat them as bits
|
||||
p.dec = (*Buffer).dec_proto3_int64
|
||||
p.size = size_proto3_int64
|
||||
case reflect.String:
|
||||
p.enc = (*Buffer).enc_proto3_string
|
||||
p.dec = (*Buffer).dec_proto3_string
|
||||
p.size = size_proto3_string
|
||||
|
||||
case reflect.Ptr:
|
||||
switch t2 := t1.Elem(); t2.Kind() {
|
||||
default:
|
||||
fmt.Fprintf(os.Stderr, "proto: no encoder function for %v -> %v\n", t1, t2)
|
||||
break
|
||||
case reflect.Bool:
|
||||
p.enc = (*Buffer).enc_bool
|
||||
p.dec = (*Buffer).dec_bool
|
||||
p.size = size_bool
|
||||
case reflect.Int32:
|
||||
p.enc = (*Buffer).enc_int32
|
||||
p.dec = (*Buffer).dec_int32
|
||||
p.size = size_int32
|
||||
case reflect.Uint32:
|
||||
p.enc = (*Buffer).enc_uint32
|
||||
p.dec = (*Buffer).dec_int32 // can reuse
|
||||
p.size = size_uint32
|
||||
case reflect.Int64, reflect.Uint64:
|
||||
p.enc = (*Buffer).enc_int64
|
||||
p.dec = (*Buffer).dec_int64
|
||||
p.size = size_int64
|
||||
case reflect.Float32:
|
||||
p.enc = (*Buffer).enc_uint32 // can just treat them as bits
|
||||
p.dec = (*Buffer).dec_int32
|
||||
p.size = size_uint32
|
||||
case reflect.Float64:
|
||||
p.enc = (*Buffer).enc_int64 // can just treat them as bits
|
||||
p.dec = (*Buffer).dec_int64
|
||||
p.size = size_int64
|
||||
case reflect.String:
|
||||
p.enc = (*Buffer).enc_string
|
||||
p.dec = (*Buffer).dec_string
|
||||
p.size = size_string
|
||||
case reflect.Struct:
|
||||
p.stype = t1.Elem()
|
||||
p.isMarshaler = isMarshaler(t1)
|
||||
p.isUnmarshaler = isUnmarshaler(t1)
|
||||
if p.Wire == "bytes" {
|
||||
p.enc = (*Buffer).enc_struct_message
|
||||
p.dec = (*Buffer).dec_struct_message
|
||||
p.size = size_struct_message
|
||||
} else {
|
||||
p.enc = (*Buffer).enc_struct_group
|
||||
p.dec = (*Buffer).dec_struct_group
|
||||
p.size = size_struct_group
|
||||
}
|
||||
}
|
||||
|
||||
case reflect.Slice:
|
||||
switch t2 := t1.Elem(); t2.Kind() {
|
||||
default:
|
||||
logNoSliceEnc(t1, t2)
|
||||
break
|
||||
case reflect.Bool:
|
||||
if p.Packed {
|
||||
p.enc = (*Buffer).enc_slice_packed_bool
|
||||
p.size = size_slice_packed_bool
|
||||
} else {
|
||||
p.enc = (*Buffer).enc_slice_bool
|
||||
p.size = size_slice_bool
|
||||
}
|
||||
p.dec = (*Buffer).dec_slice_bool
|
||||
p.packedDec = (*Buffer).dec_slice_packed_bool
|
||||
case reflect.Int32:
|
||||
if p.Packed {
|
||||
p.enc = (*Buffer).enc_slice_packed_int32
|
||||
p.size = size_slice_packed_int32
|
||||
} else {
|
||||
p.enc = (*Buffer).enc_slice_int32
|
||||
p.size = size_slice_int32
|
||||
}
|
||||
p.dec = (*Buffer).dec_slice_int32
|
||||
p.packedDec = (*Buffer).dec_slice_packed_int32
|
||||
case reflect.Uint32:
|
||||
if p.Packed {
|
||||
p.enc = (*Buffer).enc_slice_packed_uint32
|
||||
p.size = size_slice_packed_uint32
|
||||
} else {
|
||||
p.enc = (*Buffer).enc_slice_uint32
|
||||
p.size = size_slice_uint32
|
||||
}
|
||||
p.dec = (*Buffer).dec_slice_int32
|
||||
p.packedDec = (*Buffer).dec_slice_packed_int32
|
||||
case reflect.Int64, reflect.Uint64:
|
||||
if p.Packed {
|
||||
p.enc = (*Buffer).enc_slice_packed_int64
|
||||
p.size = size_slice_packed_int64
|
||||
} else {
|
||||
p.enc = (*Buffer).enc_slice_int64
|
||||
p.size = size_slice_int64
|
||||
}
|
||||
p.dec = (*Buffer).dec_slice_int64
|
||||
p.packedDec = (*Buffer).dec_slice_packed_int64
|
||||
case reflect.Uint8:
|
||||
p.enc = (*Buffer).enc_slice_byte
|
||||
p.dec = (*Buffer).dec_slice_byte
|
||||
p.size = size_slice_byte
|
||||
// This is a []byte, which is either a bytes field,
|
||||
// or the value of a map field. In the latter case,
|
||||
// we always encode an empty []byte, so we should not
|
||||
// use the proto3 enc/size funcs.
|
||||
// f == nil iff this is the key/value of a map field.
|
||||
if p.proto3 && f != nil {
|
||||
p.enc = (*Buffer).enc_proto3_slice_byte
|
||||
p.size = size_proto3_slice_byte
|
||||
}
|
||||
case reflect.Float32, reflect.Float64:
|
||||
switch t2.Bits() {
|
||||
case 32:
|
||||
// can just treat them as bits
|
||||
if p.Packed {
|
||||
p.enc = (*Buffer).enc_slice_packed_uint32
|
||||
p.size = size_slice_packed_uint32
|
||||
} else {
|
||||
p.enc = (*Buffer).enc_slice_uint32
|
||||
p.size = size_slice_uint32
|
||||
}
|
||||
p.dec = (*Buffer).dec_slice_int32
|
||||
p.packedDec = (*Buffer).dec_slice_packed_int32
|
||||
case 64:
|
||||
// can just treat them as bits
|
||||
if p.Packed {
|
||||
p.enc = (*Buffer).enc_slice_packed_int64
|
||||
p.size = size_slice_packed_int64
|
||||
} else {
|
||||
p.enc = (*Buffer).enc_slice_int64
|
||||
p.size = size_slice_int64
|
||||
}
|
||||
p.dec = (*Buffer).dec_slice_int64
|
||||
p.packedDec = (*Buffer).dec_slice_packed_int64
|
||||
default:
|
||||
logNoSliceEnc(t1, t2)
|
||||
break
|
||||
}
|
||||
case reflect.String:
|
||||
p.enc = (*Buffer).enc_slice_string
|
||||
p.dec = (*Buffer).dec_slice_string
|
||||
p.size = size_slice_string
|
||||
case reflect.Ptr:
|
||||
switch t3 := t2.Elem(); t3.Kind() {
|
||||
default:
|
||||
fmt.Fprintf(os.Stderr, "proto: no ptr oenc for %T -> %T -> %T\n", t1, t2, t3)
|
||||
break
|
||||
case reflect.Struct:
|
||||
p.stype = t2.Elem()
|
||||
p.isMarshaler = isMarshaler(t2)
|
||||
p.isUnmarshaler = isUnmarshaler(t2)
|
||||
if p.Wire == "bytes" {
|
||||
p.enc = (*Buffer).enc_slice_struct_message
|
||||
p.dec = (*Buffer).dec_slice_struct_message
|
||||
p.size = size_slice_struct_message
|
||||
} else {
|
||||
p.enc = (*Buffer).enc_slice_struct_group
|
||||
p.dec = (*Buffer).dec_slice_struct_group
|
||||
p.size = size_slice_struct_group
|
||||
}
|
||||
}
|
||||
case reflect.Slice:
|
||||
switch t2.Elem().Kind() {
|
||||
default:
|
||||
fmt.Fprintf(os.Stderr, "proto: no slice elem oenc for %T -> %T -> %T\n", t1, t2, t2.Elem())
|
||||
break
|
||||
case reflect.Uint8:
|
||||
p.enc = (*Buffer).enc_slice_slice_byte
|
||||
p.dec = (*Buffer).dec_slice_slice_byte
|
||||
p.size = size_slice_slice_byte
|
||||
}
|
||||
}
|
||||
|
||||
case reflect.Map:
|
||||
p.enc = (*Buffer).enc_new_map
|
||||
p.dec = (*Buffer).dec_new_map
|
||||
p.size = size_new_map
|
||||
|
||||
p.mtype = t1
|
||||
p.mkeyprop = &Properties{}
|
||||
p.mkeyprop.init(reflect.PtrTo(p.mtype.Key()), "Key", f.Tag.Get("protobuf_key"), nil, lockGetProp)
|
||||
p.mvalprop = &Properties{}
|
||||
vtype := p.mtype.Elem()
|
||||
if vtype.Kind() != reflect.Ptr && vtype.Kind() != reflect.Slice {
|
||||
// The value type is not a message (*T) or bytes ([]byte),
|
||||
// so we need encoders for the pointer to this type.
|
||||
vtype = reflect.PtrTo(vtype)
|
||||
}
|
||||
p.mvalprop.init(vtype, "Value", f.Tag.Get("protobuf_val"), nil, lockGetProp)
|
||||
}
|
||||
|
||||
// precalculate tag code
|
||||
wire := p.WireType
|
||||
if p.Packed {
|
||||
wire = WireBytes
|
||||
}
|
||||
x := uint32(p.Tag)<<3 | uint32(wire)
|
||||
i := 0
|
||||
for i = 0; x > 127; i++ {
|
||||
p.tagbuf[i] = 0x80 | uint8(x&0x7F)
|
||||
x >>= 7
|
||||
}
|
||||
p.tagbuf[i] = uint8(x)
|
||||
p.tagcode = p.tagbuf[0 : i+1]
|
||||
|
||||
if p.stype != nil {
|
||||
if lockGetProp {
|
||||
p.sprop = GetProperties(p.stype)
|
||||
} else {
|
||||
p.sprop = getPropertiesLocked(p.stype)
|
||||
}
|
||||
}
|
||||
}
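
// exampleTagcode is an illustrative sketch, not part of the upstream library,
// reproducing the tagcode precalculation above: field 49 with wire type
// WireBytes has key (49<<3)|2 = 394, which varint-encodes as 0x8a 0x03.
func exampleTagcode() []byte {
	x := uint32(49)<<3 | uint32(WireBytes)
	var buf [8]byte
	i := 0
	for ; x > 127; i++ {
		buf[i] = 0x80 | uint8(x&0x7F)
		x >>= 7
	}
	buf[i] = uint8(x)
	return buf[:i+1] // []byte{0x8a, 0x03}
}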
|
||||
|
||||
var (
|
||||
marshalerType = reflect.TypeOf((*Marshaler)(nil)).Elem()
|
||||
unmarshalerType = reflect.TypeOf((*Unmarshaler)(nil)).Elem()
|
||||
)
|
||||
|
||||
// isMarshaler reports whether type t implements Marshaler.
|
||||
func isMarshaler(t reflect.Type) bool {
|
||||
// We're checking for (likely) pointer-receiver methods
|
||||
// so if t is not a pointer, something is very wrong.
|
||||
// The calls above only invoke isMarshaler on pointer types.
|
||||
if t.Kind() != reflect.Ptr {
|
||||
panic("proto: misuse of isMarshaler")
|
||||
}
|
||||
return t.Implements(marshalerType)
|
||||
}
|
||||
|
||||
// isUnmarshaler reports whether type t implements Unmarshaler.
|
||||
func isUnmarshaler(t reflect.Type) bool {
|
||||
// We're checking for (likely) pointer-receiver methods
|
||||
// so if t is not a pointer, something is very wrong.
|
||||
// The calls above only invoke isUnmarshaler on pointer types.
|
||||
if t.Kind() != reflect.Ptr {
|
||||
panic("proto: misuse of isUnmarshaler")
|
||||
}
|
||||
return t.Implements(unmarshalerType)
|
||||
}
|
||||
|
||||
// Init populates the properties from a protocol buffer struct tag.
|
||||
func (p *Properties) Init(typ reflect.Type, name, tag string, f *reflect.StructField) {
|
||||
p.init(typ, name, tag, f, true)
|
||||
}
|
||||
|
||||
func (p *Properties) init(typ reflect.Type, name, tag string, f *reflect.StructField, lockGetProp bool) {
|
||||
// "bytes,49,opt,def=hello!"
|
||||
p.Name = name
|
||||
p.OrigName = name
|
||||
if f != nil {
|
||||
p.field = toField(f)
|
||||
}
|
||||
if tag == "" {
|
||||
return
|
||||
}
|
||||
p.Parse(tag)
|
||||
p.setEncAndDec(typ, f, lockGetProp)
|
||||
}
|
||||
|
||||
var (
|
||||
propertiesMu sync.RWMutex
|
||||
propertiesMap = make(map[reflect.Type]*StructProperties)
|
||||
)
|
||||
|
||||
// GetProperties returns the list of properties for the type represented by t.
|
||||
// t must represent a generated struct type of a protocol message.
|
||||
func GetProperties(t reflect.Type) *StructProperties {
|
||||
if t.Kind() != reflect.Struct {
|
||||
panic("proto: type must have kind struct")
|
||||
}
|
||||
|
||||
// Most calls to GetProperties in a long-running program will be
|
||||
// retrieving details for types we have seen before.
|
||||
propertiesMu.RLock()
|
||||
sprop, ok := propertiesMap[t]
|
||||
propertiesMu.RUnlock()
|
||||
if ok {
|
||||
if collectStats {
|
||||
stats.Chit++
|
||||
}
|
||||
return sprop
|
||||
}
|
||||
|
||||
propertiesMu.Lock()
|
||||
sprop = getPropertiesLocked(t)
|
||||
propertiesMu.Unlock()
|
||||
return sprop
|
||||
}
|
||||
|
||||
// getPropertiesLocked requires that propertiesMu is held.
|
||||
func getPropertiesLocked(t reflect.Type) *StructProperties {
|
||||
if prop, ok := propertiesMap[t]; ok {
|
||||
if collectStats {
|
||||
stats.Chit++
|
||||
}
|
||||
return prop
|
||||
}
|
||||
if collectStats {
|
||||
stats.Cmiss++
|
||||
}
|
||||
|
||||
prop := new(StructProperties)
|
||||
// in case of recursive protos, fill this in now.
|
||||
propertiesMap[t] = prop
|
||||
|
||||
// build properties
|
||||
prop.extendable = reflect.PtrTo(t).Implements(extendableProtoType)
|
||||
prop.unrecField = invalidField
|
||||
prop.Prop = make([]*Properties, t.NumField())
|
||||
prop.order = make([]int, t.NumField())
|
||||
|
||||
for i := 0; i < t.NumField(); i++ {
|
||||
f := t.Field(i)
|
||||
p := new(Properties)
|
||||
name := f.Name
|
||||
p.init(f.Type, name, f.Tag.Get("protobuf"), &f, false)
|
||||
|
||||
if f.Name == "XXX_extensions" { // special case
|
||||
p.enc = (*Buffer).enc_map
|
||||
p.dec = nil // not needed
|
||||
p.size = size_map
|
||||
}
|
||||
if f.Name == "XXX_unrecognized" { // special case
|
||||
prop.unrecField = toField(&f)
|
||||
}
|
||||
oneof := f.Tag.Get("protobuf_oneof") != "" // special case
|
||||
prop.Prop[i] = p
|
||||
prop.order[i] = i
|
||||
if debug {
|
||||
print(i, " ", f.Name, " ", t.String(), " ")
|
||||
if p.Tag > 0 {
|
||||
print(p.String())
|
||||
}
|
||||
print("\n")
|
||||
}
|
||||
if p.enc == nil && !strings.HasPrefix(f.Name, "XXX_") && !oneof {
|
||||
fmt.Fprintln(os.Stderr, "proto: no encoder for", f.Name, f.Type.String(), "[GetProperties]")
|
||||
}
|
||||
}
|
||||
|
||||
// Re-order prop.order.
|
||||
sort.Sort(prop)
|
||||
|
||||
type oneofMessage interface {
|
||||
XXX_OneofFuncs() (func(Message, *Buffer) error, func(Message, int, int, *Buffer) (bool, error), []interface{})
|
||||
}
|
||||
if om, ok := reflect.Zero(reflect.PtrTo(t)).Interface().(oneofMessage); ok {
|
||||
var oots []interface{}
|
||||
prop.oneofMarshaler, prop.oneofUnmarshaler, oots = om.XXX_OneofFuncs()
|
||||
prop.stype = t
|
||||
|
||||
// Interpret oneof metadata.
|
||||
prop.OneofTypes = make(map[string]*OneofProperties)
|
||||
for _, oot := range oots {
|
||||
oop := &OneofProperties{
|
||||
Type: reflect.ValueOf(oot).Type(), // *T
|
||||
Prop: new(Properties),
|
||||
}
|
||||
sft := oop.Type.Elem().Field(0)
|
||||
oop.Prop.Name = sft.Name
|
||||
oop.Prop.Parse(sft.Tag.Get("protobuf"))
|
||||
// There will be exactly one interface field that
|
||||
// this new value is assignable to.
|
||||
for i := 0; i < t.NumField(); i++ {
|
||||
f := t.Field(i)
|
||||
if f.Type.Kind() != reflect.Interface {
|
||||
continue
|
||||
}
|
||||
if !oop.Type.AssignableTo(f.Type) {
|
||||
continue
|
||||
}
|
||||
oop.Field = i
|
||||
break
|
||||
}
|
||||
prop.OneofTypes[oop.Prop.OrigName] = oop
|
||||
}
|
||||
}
|
||||
|
||||
// build required counts
|
||||
// build tags
|
||||
reqCount := 0
|
||||
prop.decoderOrigNames = make(map[string]int)
|
||||
for i, p := range prop.Prop {
|
||||
if strings.HasPrefix(p.Name, "XXX_") {
|
||||
// Internal fields should not appear in tags/origNames maps.
|
||||
// They are handled specially when encoding and decoding.
|
||||
continue
|
||||
}
|
||||
if p.Required {
|
||||
reqCount++
|
||||
}
|
||||
prop.decoderTags.put(p.Tag, i)
|
||||
prop.decoderOrigNames[p.OrigName] = i
|
||||
}
|
||||
prop.reqCount = reqCount
|
||||
|
||||
return prop
|
||||
}
|
||||
|
||||
// Return the Properties object for the x[0]'th field of the structure.
|
||||
func propByIndex(t reflect.Type, x []int) *Properties {
|
||||
if len(x) != 1 {
|
||||
fmt.Fprintf(os.Stderr, "proto: field index dimension %d (not 1) for type %s\n", len(x), t)
|
||||
return nil
|
||||
}
|
||||
prop := GetProperties(t)
|
||||
return prop.Prop[x[0]]
|
||||
}
|
||||
|
||||
// Get the address and type of a pointer to a struct from an interface.
|
||||
func getbase(pb Message) (t reflect.Type, b structPointer, err error) {
|
||||
if pb == nil {
|
||||
err = ErrNil
|
||||
return
|
||||
}
|
||||
// get the reflect type of the pointer to the struct.
|
||||
t = reflect.TypeOf(pb)
|
||||
// get the address of the struct.
|
||||
value := reflect.ValueOf(pb)
|
||||
b = toStructPointer(value)
|
||||
return
|
||||
}
|
||||
|
||||
// A global registry of enum types.
|
||||
// The generated code will register the generated maps by calling RegisterEnum.
|
||||
|
||||
var enumValueMaps = make(map[string]map[string]int32)
|
||||
|
||||
// RegisterEnum is called from the generated code to install the enum descriptor
|
||||
// maps into the global table to aid parsing text format protocol buffers.
|
||||
func RegisterEnum(typeName string, unusedNameMap map[int32]string, valueMap map[string]int32) {
|
||||
if _, ok := enumValueMaps[typeName]; ok {
|
||||
panic("proto: duplicate enum registered: " + typeName)
|
||||
}
|
||||
enumValueMaps[typeName] = valueMap
|
||||
}
|
||||
|
||||
// EnumValueMap returns the mapping from names to integers of the
|
||||
// enum type enumType, or nil if not found.
|
||||
func EnumValueMap(enumType string) map[string]int32 {
|
||||
return enumValueMaps[enumType]
|
||||
}
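
// exampleEnumLookup is an illustrative sketch, not part of the upstream
// library. The names "example.Colour", "RED" and "BLUE" are hypothetical;
// generated code would normally call RegisterEnum from an init function.
func exampleEnumLookup() int32 {
	RegisterEnum("example.Colour", nil, map[string]int32{"RED": 0, "BLUE": 1})
	if m := EnumValueMap("example.Colour"); m != nil {
		return m["BLUE"] // 1
	}
	return -1
}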
|
||||
122
Godeps/_workspace/src/github.com/golang/protobuf/proto/proto3_proto/proto3.pb.go
generated
vendored
Normal file
@@ -0,0 +1,122 @@
|
||||
// Code generated by protoc-gen-go.
|
||||
// source: proto3_proto/proto3.proto
|
||||
// DO NOT EDIT!
|
||||
|
||||
/*
|
||||
Package proto3_proto is a generated protocol buffer package.
|
||||
|
||||
It is generated from these files:
|
||||
proto3_proto/proto3.proto
|
||||
|
||||
It has these top-level messages:
|
||||
Message
|
||||
Nested
|
||||
MessageWithMap
|
||||
*/
|
||||
package proto3_proto
|
||||
|
||||
import proto "github.com/golang/protobuf/proto"
|
||||
import testdata "github.com/golang/protobuf/proto/testdata"
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
var _ = proto.Marshal
|
||||
|
||||
type Message_Humour int32
|
||||
|
||||
const (
|
||||
Message_UNKNOWN Message_Humour = 0
|
||||
Message_PUNS Message_Humour = 1
|
||||
Message_SLAPSTICK Message_Humour = 2
|
||||
Message_BILL_BAILEY Message_Humour = 3
|
||||
)
|
||||
|
||||
var Message_Humour_name = map[int32]string{
|
||||
0: "UNKNOWN",
|
||||
1: "PUNS",
|
||||
2: "SLAPSTICK",
|
||||
3: "BILL_BAILEY",
|
||||
}
|
||||
var Message_Humour_value = map[string]int32{
|
||||
"UNKNOWN": 0,
|
||||
"PUNS": 1,
|
||||
"SLAPSTICK": 2,
|
||||
"BILL_BAILEY": 3,
|
||||
}
|
||||
|
||||
func (x Message_Humour) String() string {
|
||||
return proto.EnumName(Message_Humour_name, int32(x))
|
||||
}
|
||||
|
||||
type Message struct {
|
||||
Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
|
||||
Hilarity Message_Humour `protobuf:"varint,2,opt,name=hilarity,enum=proto3_proto.Message_Humour" json:"hilarity,omitempty"`
|
||||
HeightInCm uint32 `protobuf:"varint,3,opt,name=height_in_cm" json:"height_in_cm,omitempty"`
|
||||
Data []byte `protobuf:"bytes,4,opt,name=data,proto3" json:"data,omitempty"`
|
||||
ResultCount int64 `protobuf:"varint,7,opt,name=result_count" json:"result_count,omitempty"`
|
||||
TrueScotsman bool `protobuf:"varint,8,opt,name=true_scotsman" json:"true_scotsman,omitempty"`
|
||||
Score float32 `protobuf:"fixed32,9,opt,name=score" json:"score,omitempty"`
|
||||
Key []uint64 `protobuf:"varint,5,rep,name=key" json:"key,omitempty"`
|
||||
Nested *Nested `protobuf:"bytes,6,opt,name=nested" json:"nested,omitempty"`
|
||||
Terrain map[string]*Nested `protobuf:"bytes,10,rep,name=terrain" json:"terrain,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
|
||||
Proto2Field *testdata.SubDefaults `protobuf:"bytes,11,opt,name=proto2_field" json:"proto2_field,omitempty"`
|
||||
Proto2Value map[string]*testdata.SubDefaults `protobuf:"bytes,13,rep,name=proto2_value" json:"proto2_value,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
|
||||
}
|
||||
|
||||
func (m *Message) Reset() { *m = Message{} }
|
||||
func (m *Message) String() string { return proto.CompactTextString(m) }
|
||||
func (*Message) ProtoMessage() {}
|
||||
|
||||
func (m *Message) GetNested() *Nested {
|
||||
if m != nil {
|
||||
return m.Nested
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *Message) GetTerrain() map[string]*Nested {
|
||||
if m != nil {
|
||||
return m.Terrain
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *Message) GetProto2Field() *testdata.SubDefaults {
|
||||
if m != nil {
|
||||
return m.Proto2Field
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *Message) GetProto2Value() map[string]*testdata.SubDefaults {
|
||||
if m != nil {
|
||||
return m.Proto2Value
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type Nested struct {
|
||||
Bunny string `protobuf:"bytes,1,opt,name=bunny" json:"bunny,omitempty"`
|
||||
}
|
||||
|
||||
func (m *Nested) Reset() { *m = Nested{} }
|
||||
func (m *Nested) String() string { return proto.CompactTextString(m) }
|
||||
func (*Nested) ProtoMessage() {}
|
||||
|
||||
type MessageWithMap struct {
|
||||
ByteMapping map[bool][]byte `protobuf:"bytes,1,rep,name=byte_mapping" json:"byte_mapping,omitempty" protobuf_key:"varint,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value,proto3"`
|
||||
}
|
||||
|
||||
func (m *MessageWithMap) Reset() { *m = MessageWithMap{} }
|
||||
func (m *MessageWithMap) String() string { return proto.CompactTextString(m) }
|
||||
func (*MessageWithMap) ProtoMessage() {}
|
||||
|
||||
func (m *MessageWithMap) GetByteMapping() map[bool][]byte {
|
||||
if m != nil {
|
||||
return m.ByteMapping
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
proto.RegisterEnum("proto3_proto.Message_Humour", Message_Humour_name, Message_Humour_value)
|
||||
}
|
||||
68
Godeps/_workspace/src/github.com/golang/protobuf/proto/proto3_proto/proto3.proto
generated
vendored
Normal file
@@ -0,0 +1,68 @@
|
||||
// Go support for Protocol Buffers - Google's data interchange format
|
||||
//
|
||||
// Copyright 2014 The Go Authors. All rights reserved.
|
||||
// https://github.com/golang/protobuf
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
import "testdata/test.proto";
|
||||
|
||||
package proto3_proto;
|
||||
|
||||
message Message {
|
||||
enum Humour {
|
||||
UNKNOWN = 0;
|
||||
PUNS = 1;
|
||||
SLAPSTICK = 2;
|
||||
BILL_BAILEY = 3;
|
||||
}
|
||||
|
||||
string name = 1;
|
||||
Humour hilarity = 2;
|
||||
uint32 height_in_cm = 3;
|
||||
bytes data = 4;
|
||||
int64 result_count = 7;
|
||||
bool true_scotsman = 8;
|
||||
float score = 9;
|
||||
|
||||
repeated uint64 key = 5;
|
||||
Nested nested = 6;
|
||||
|
||||
map<string, Nested> terrain = 10;
|
||||
testdata.SubDefaults proto2_field = 11;
|
||||
map<string, testdata.SubDefaults> proto2_value = 13;
|
||||
}
|
||||
|
||||
message Nested {
|
||||
string bunny = 1;
|
||||
}
|
||||
|
||||
message MessageWithMap {
|
||||
map<bool, bytes> byte_mapping = 1;
|
||||
}
|
||||
797
Godeps/_workspace/src/github.com/golang/protobuf/proto/text.go
generated
vendored
Normal file
@@ -0,0 +1,797 @@
|
||||
// Go support for Protocol Buffers - Google's data interchange format
|
||||
//
|
||||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// https://github.com/golang/protobuf
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
package proto
|
||||
|
||||
// Functions for writing the text protocol buffer format.
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"encoding"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"math"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var (
|
||||
newline = []byte("\n")
|
||||
spaces = []byte(" ")
|
||||
gtNewline = []byte(">\n")
|
||||
endBraceNewline = []byte("}\n")
|
||||
backslashN = []byte{'\\', 'n'}
|
||||
backslashR = []byte{'\\', 'r'}
|
||||
backslashT = []byte{'\\', 't'}
|
||||
backslashDQ = []byte{'\\', '"'}
|
||||
backslashBS = []byte{'\\', '\\'}
|
||||
posInf = []byte("inf")
|
||||
negInf = []byte("-inf")
|
||||
nan = []byte("nan")
|
||||
)
|
||||
|
||||
type writer interface {
|
||||
io.Writer
|
||||
WriteByte(byte) error
|
||||
}
|
||||
|
||||
// textWriter is an io.Writer that tracks its indentation level.
|
||||
type textWriter struct {
|
||||
ind int
|
||||
complete bool // if the current position is a complete line
|
||||
compact bool // whether to write out as a one-liner
|
||||
w writer
|
||||
}
|
||||
|
||||
func (w *textWriter) WriteString(s string) (n int, err error) {
|
||||
if !strings.Contains(s, "\n") {
|
||||
if !w.compact && w.complete {
|
||||
w.writeIndent()
|
||||
}
|
||||
w.complete = false
|
||||
return io.WriteString(w.w, s)
|
||||
}
|
||||
// WriteString is typically called without newlines, so this
|
||||
// codepath and its copy are rare. We copy to avoid
|
||||
// duplicating all of Write's logic here.
|
||||
return w.Write([]byte(s))
|
||||
}
|
||||
|
||||
func (w *textWriter) Write(p []byte) (n int, err error) {
|
||||
newlines := bytes.Count(p, newline)
|
||||
if newlines == 0 {
|
||||
if !w.compact && w.complete {
|
||||
w.writeIndent()
|
||||
}
|
||||
n, err = w.w.Write(p)
|
||||
w.complete = false
|
||||
return n, err
|
||||
}
|
||||
|
||||
frags := bytes.SplitN(p, newline, newlines+1)
|
||||
if w.compact {
|
||||
for i, frag := range frags {
|
||||
if i > 0 {
|
||||
if err := w.w.WriteByte(' '); err != nil {
|
||||
return n, err
|
||||
}
|
||||
n++
|
||||
}
|
||||
nn, err := w.w.Write(frag)
|
||||
n += nn
|
||||
if err != nil {
|
||||
return n, err
|
||||
}
|
||||
}
|
||||
return n, nil
|
||||
}
|
||||
|
||||
for i, frag := range frags {
|
||||
if w.complete {
|
||||
w.writeIndent()
|
||||
}
|
||||
nn, err := w.w.Write(frag)
|
||||
n += nn
|
||||
if err != nil {
|
||||
return n, err
|
||||
}
|
||||
if i+1 < len(frags) {
|
||||
if err := w.w.WriteByte('\n'); err != nil {
|
||||
return n, err
|
||||
}
|
||||
n++
|
||||
}
|
||||
}
|
||||
w.complete = len(frags[len(frags)-1]) == 0
|
||||
return n, nil
|
||||
}
|
||||
|
||||
func (w *textWriter) WriteByte(c byte) error {
|
||||
if w.compact && c == '\n' {
|
||||
c = ' '
|
||||
}
|
||||
if !w.compact && w.complete {
|
||||
w.writeIndent()
|
||||
}
|
||||
err := w.w.WriteByte(c)
|
||||
w.complete = c == '\n'
|
||||
return err
|
||||
}
|
||||
|
||||
func (w *textWriter) indent() { w.ind++ }
|
||||
|
||||
func (w *textWriter) unindent() {
|
||||
if w.ind == 0 {
|
||||
log.Printf("proto: textWriter unindented too far")
|
||||
return
|
||||
}
|
||||
w.ind--
|
||||
}
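
// exampleTextWriter is an illustrative sketch, not part of the upstream
// library: in compact mode newlines are written as single spaces, otherwise
// each fresh line is indented two spaces per indent level.
func exampleTextWriter() string {
	var buf bytes.Buffer
	w := &textWriter{w: &buf, compact: true}
	w.WriteString("a: 1")
	w.WriteByte('\n') // becomes ' ' because compact is set
	w.WriteString("b: 2")
	return buf.String() // "a: 1 b: 2"
}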
|
||||
|
||||
func writeName(w *textWriter, props *Properties) error {
|
||||
if _, err := w.WriteString(props.OrigName); err != nil {
|
||||
return err
|
||||
}
|
||||
if props.Wire != "group" {
|
||||
return w.WriteByte(':')
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
var (
|
||||
messageSetType = reflect.TypeOf((*MessageSet)(nil)).Elem()
|
||||
)
|
||||
|
||||
// raw is the interface satisfied by RawMessage.
|
||||
type raw interface {
|
||||
Bytes() []byte
|
||||
}
|
||||
|
||||
func writeStruct(w *textWriter, sv reflect.Value) error {
|
||||
if sv.Type() == messageSetType {
|
||||
return writeMessageSet(w, sv.Addr().Interface().(*MessageSet))
|
||||
}
|
||||
|
||||
st := sv.Type()
|
||||
sprops := GetProperties(st)
|
||||
for i := 0; i < sv.NumField(); i++ {
|
||||
fv := sv.Field(i)
|
||||
props := sprops.Prop[i]
|
||||
name := st.Field(i).Name
|
||||
|
||||
if strings.HasPrefix(name, "XXX_") {
|
||||
// There are two XXX_ fields:
|
||||
// XXX_unrecognized []byte
|
||||
// XXX_extensions map[int32]proto.Extension
|
||||
// The first is handled here;
|
||||
// the second is handled at the bottom of this function.
|
||||
if name == "XXX_unrecognized" && !fv.IsNil() {
|
||||
if err := writeUnknownStruct(w, fv.Interface().([]byte)); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
continue
|
||||
}
|
||||
if fv.Kind() == reflect.Ptr && fv.IsNil() {
|
||||
// Field not filled in. This could be an optional field or
|
||||
// a required field that wasn't filled in. Either way, there
|
||||
// isn't anything we can show for it.
|
||||
continue
|
||||
}
|
||||
if fv.Kind() == reflect.Slice && fv.IsNil() {
|
||||
// Repeated field that is empty, or a bytes field that is unused.
|
||||
continue
|
||||
}
|
||||
|
||||
if props.Repeated && fv.Kind() == reflect.Slice {
|
||||
// Repeated field.
|
||||
for j := 0; j < fv.Len(); j++ {
|
||||
if err := writeName(w, props); err != nil {
|
||||
return err
|
||||
}
|
||||
if !w.compact {
|
||||
if err := w.WriteByte(' '); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
v := fv.Index(j)
|
||||
if v.Kind() == reflect.Ptr && v.IsNil() {
|
||||
// A nil message in a repeated field is not valid,
|
||||
// but we can handle that more gracefully than panicking.
|
||||
if _, err := w.Write([]byte("<nil>\n")); err != nil {
|
||||
return err
|
||||
}
|
||||
continue
|
||||
}
|
||||
if err := writeAny(w, v, props); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := w.WriteByte('\n'); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
continue
|
||||
}
|
||||
if fv.Kind() == reflect.Map {
|
||||
// Map fields are rendered as a repeated struct with key/value fields.
|
||||
keys := fv.MapKeys()
|
||||
sort.Sort(mapKeys(keys))
|
||||
for _, key := range keys {
|
||||
val := fv.MapIndex(key)
|
||||
if err := writeName(w, props); err != nil {
|
||||
return err
|
||||
}
|
||||
if !w.compact {
|
||||
if err := w.WriteByte(' '); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
// open struct
|
||||
if err := w.WriteByte('<'); err != nil {
|
||||
return err
|
||||
}
|
||||
if !w.compact {
|
||||
if err := w.WriteByte('\n'); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
w.indent()
|
||||
// key
|
||||
if _, err := w.WriteString("key:"); err != nil {
|
||||
return err
|
||||
}
|
||||
if !w.compact {
|
||||
if err := w.WriteByte(' '); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if err := writeAny(w, key, props.mkeyprop); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := w.WriteByte('\n'); err != nil {
|
||||
return err
|
||||
}
|
||||
// nil values aren't legal, but we can avoid panicking because of them.
|
||||
if val.Kind() != reflect.Ptr || !val.IsNil() {
|
||||
// value
|
||||
if _, err := w.WriteString("value:"); err != nil {
|
||||
return err
|
||||
}
|
||||
if !w.compact {
|
||||
if err := w.WriteByte(' '); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if err := writeAny(w, val, props.mvalprop); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := w.WriteByte('\n'); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
// close struct
|
||||
w.unindent()
|
||||
if err := w.WriteByte('>'); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := w.WriteByte('\n'); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
continue
|
||||
}
|
||||
if props.proto3 && fv.Kind() == reflect.Slice && fv.Len() == 0 {
|
||||
// empty bytes field
|
||||
continue
|
||||
}
|
||||
if fv.Kind() != reflect.Ptr && fv.Kind() != reflect.Slice {
|
||||
// proto3 non-repeated scalar field; skip if zero value
|
||||
if isProto3Zero(fv) {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
if fv.Kind() == reflect.Interface {
|
||||
// Check if it is a oneof.
|
||||
if st.Field(i).Tag.Get("protobuf_oneof") != "" {
|
||||
// fv is nil, or holds a pointer to generated struct.
|
||||
// That generated struct has exactly one field,
|
||||
// which has a protobuf struct tag.
|
||||
if fv.IsNil() {
|
||||
continue
|
||||
}
|
||||
inner := fv.Elem().Elem() // interface -> *T -> T
|
||||
tag := inner.Type().Field(0).Tag.Get("protobuf")
|
||||
props = new(Properties) // Overwrite the outer props var, but not its pointee.
|
||||
props.Parse(tag)
|
||||
// Write the value in the oneof, not the oneof itself.
|
||||
fv = inner.Field(0)
|
||||
|
||||
// Special case to cope with malformed messages gracefully:
|
||||
// If the value in the oneof is a nil pointer, don't panic
|
||||
// in writeAny.
|
||||
if fv.Kind() == reflect.Ptr && fv.IsNil() {
|
||||
// Use errors.New so writeAny won't render quotes.
|
||||
msg := errors.New("/* nil */")
|
||||
fv = reflect.ValueOf(&msg).Elem()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if err := writeName(w, props); err != nil {
|
||||
return err
|
||||
}
|
||||
if !w.compact {
|
||||
if err := w.WriteByte(' '); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if b, ok := fv.Interface().(raw); ok {
|
||||
if err := writeRaw(w, b.Bytes()); err != nil {
|
||||
return err
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// Enums have a String method, so writeAny will work fine.
|
||||
if err := writeAny(w, fv, props); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := w.WriteByte('\n'); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Extensions (the XXX_extensions field).
|
||||
pv := sv.Addr()
|
||||
if pv.Type().Implements(extendableProtoType) {
|
||||
if err := writeExtensions(w, pv); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// writeRaw writes an uninterpreted raw message.
|
||||
func writeRaw(w *textWriter, b []byte) error {
|
||||
if err := w.WriteByte('<'); err != nil {
|
||||
return err
|
||||
}
|
||||
if !w.compact {
|
||||
if err := w.WriteByte('\n'); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
w.indent()
|
||||
if err := writeUnknownStruct(w, b); err != nil {
|
||||
return err
|
||||
}
|
||||
w.unindent()
|
||||
if err := w.WriteByte('>'); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// writeAny writes an arbitrary field.
|
||||
func writeAny(w *textWriter, v reflect.Value, props *Properties) error {
|
||||
v = reflect.Indirect(v)
|
||||
|
||||
// Floats have special cases.
|
||||
if v.Kind() == reflect.Float32 || v.Kind() == reflect.Float64 {
|
||||
x := v.Float()
|
||||
var b []byte
|
||||
switch {
|
||||
case math.IsInf(x, 1):
|
||||
b = posInf
|
||||
case math.IsInf(x, -1):
|
||||
b = negInf
|
||||
case math.IsNaN(x):
|
||||
b = nan
|
||||
}
|
||||
if b != nil {
|
||||
_, err := w.Write(b)
|
||||
return err
|
||||
}
|
||||
// Other values are handled below.
|
||||
}
|
||||
|
||||
// We don't attempt to serialise every possible value type; only those
|
||||
// that can occur in protocol buffers.
|
||||
switch v.Kind() {
|
||||
case reflect.Slice:
|
||||
// Should only be a []byte; repeated fields are handled in writeStruct.
|
||||
if err := writeString(w, string(v.Interface().([]byte))); err != nil {
|
||||
return err
|
||||
}
|
||||
case reflect.String:
|
||||
if err := writeString(w, v.String()); err != nil {
|
||||
return err
|
||||
}
|
||||
case reflect.Struct:
|
||||
// Required/optional group/message.
|
||||
var bra, ket byte = '<', '>'
|
||||
if props != nil && props.Wire == "group" {
|
||||
bra, ket = '{', '}'
|
||||
}
|
||||
if err := w.WriteByte(bra); err != nil {
|
||||
return err
|
||||
}
|
||||
if !w.compact {
|
||||
if err := w.WriteByte('\n'); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
w.indent()
|
||||
if tm, ok := v.Interface().(encoding.TextMarshaler); ok {
|
||||
text, err := tm.MarshalText()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if _, err = w.Write(text); err != nil {
|
||||
return err
|
||||
}
|
||||
} else if err := writeStruct(w, v); err != nil {
|
||||
return err
|
||||
}
|
||||
w.unindent()
|
||||
if err := w.WriteByte(ket); err != nil {
|
||||
return err
|
||||
}
|
||||
default:
|
||||
_, err := fmt.Fprint(w, v.Interface())
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// equivalent to C's isprint.
|
||||
func isprint(c byte) bool {
|
||||
return c >= 0x20 && c < 0x7f
|
||||
}
|
||||
|
||||
// writeString writes a string in the protocol buffer text format.
|
||||
// It is similar to strconv.Quote except we don't use Go escape sequences,
|
||||
// we treat the string as a byte sequence, and we use octal escapes.
|
||||
// These differences are to maintain interoperability with the other
|
||||
// languages' implementations of the text format.
|
||||
func writeString(w *textWriter, s string) error {
|
||||
// use WriteByte here to get any needed indent
|
||||
if err := w.WriteByte('"'); err != nil {
|
||||
return err
|
||||
}
|
||||
// Loop over the bytes, not the runes.
|
||||
for i := 0; i < len(s); i++ {
|
||||
var err error
|
||||
// Divergence from C++: we don't escape apostrophes.
|
||||
// There's no need to escape them, and the C++ parser
|
||||
// copes with a naked apostrophe.
|
||||
switch c := s[i]; c {
|
||||
case '\n':
|
||||
_, err = w.w.Write(backslashN)
|
||||
case '\r':
|
||||
_, err = w.w.Write(backslashR)
|
||||
case '\t':
|
||||
_, err = w.w.Write(backslashT)
|
||||
case '"':
|
||||
_, err = w.w.Write(backslashDQ)
|
||||
case '\\':
|
||||
_, err = w.w.Write(backslashBS)
|
||||
default:
|
||||
if isprint(c) {
|
||||
err = w.w.WriteByte(c)
|
||||
} else {
|
||||
_, err = fmt.Fprintf(w.w, "\\%03o", c)
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return w.WriteByte('"')
|
||||
}
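
// exampleWriteString is an illustrative sketch, not part of the upstream
// library, showing the octal escaping described above: bytes outside the
// printable ASCII range become \ooo rather than Go escape sequences.
func exampleWriteString() string {
	var buf bytes.Buffer
	w := &textWriter{w: &buf, compact: true}
	writeString(w, "a\x01b\"c")
	return buf.String() // `"a\001b\"c"`
}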
|
||||
|
||||
func writeMessageSet(w *textWriter, ms *MessageSet) error {
|
||||
for _, item := range ms.Item {
|
||||
id := *item.TypeId
|
||||
if msd, ok := messageSetMap[id]; ok {
|
||||
// Known message set type.
|
||||
if _, err := fmt.Fprintf(w, "[%s]: <\n", msd.name); err != nil {
|
||||
return err
|
||||
}
|
||||
w.indent()
|
||||
|
||||
pb := reflect.New(msd.t.Elem())
|
||||
if err := Unmarshal(item.Message, pb.Interface().(Message)); err != nil {
|
||||
if _, err := fmt.Fprintf(w, "/* bad message: %v */\n", err); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
if err := writeStruct(w, pb.Elem()); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Unknown type.
|
||||
if _, err := fmt.Fprintf(w, "[%d]: <\n", id); err != nil {
|
||||
return err
|
||||
}
|
||||
w.indent()
|
||||
if err := writeUnknownStruct(w, item.Message); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
w.unindent()
|
||||
if _, err := w.Write(gtNewline); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func writeUnknownStruct(w *textWriter, data []byte) (err error) {
|
||||
if !w.compact {
|
||||
if _, err := fmt.Fprintf(w, "/* %d unknown bytes */\n", len(data)); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
b := NewBuffer(data)
|
||||
for b.index < len(b.buf) {
|
||||
x, err := b.DecodeVarint()
|
||||
if err != nil {
|
||||
_, err := fmt.Fprintf(w, "/* %v */\n", err)
|
||||
return err
|
||||
}
|
||||
wire, tag := x&7, x>>3
|
||||
if wire == WireEndGroup {
|
||||
w.unindent()
|
||||
if _, err := w.Write(endBraceNewline); err != nil {
|
||||
return err
|
||||
}
|
||||
continue
|
||||
}
|
||||
if _, err := fmt.Fprint(w, tag); err != nil {
|
||||
return err
|
||||
}
|
||||
if wire != WireStartGroup {
|
||||
if err := w.WriteByte(':'); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if !w.compact || wire == WireStartGroup {
|
||||
if err := w.WriteByte(' '); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
switch wire {
|
||||
case WireBytes:
|
||||
buf, e := b.DecodeRawBytes(false)
|
||||
if e == nil {
|
||||
_, err = fmt.Fprintf(w, "%q", buf)
|
||||
} else {
|
||||
_, err = fmt.Fprintf(w, "/* %v */", e)
|
||||
}
|
||||
case WireFixed32:
|
||||
x, err = b.DecodeFixed32()
|
||||
err = writeUnknownInt(w, x, err)
|
||||
case WireFixed64:
|
||||
x, err = b.DecodeFixed64()
|
||||
err = writeUnknownInt(w, x, err)
|
||||
case WireStartGroup:
|
||||
err = w.WriteByte('{')
|
||||
w.indent()
|
||||
case WireVarint:
|
||||
x, err = b.DecodeVarint()
|
||||
err = writeUnknownInt(w, x, err)
|
||||
default:
|
||||
_, err = fmt.Fprintf(w, "/* unknown wire type %d */", wire)
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err = w.WriteByte('\n'); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func writeUnknownInt(w *textWriter, x uint64, err error) error {
|
||||
if err == nil {
|
||||
_, err = fmt.Fprint(w, x)
|
||||
} else {
|
||||
_, err = fmt.Fprintf(w, "/* %v */", err)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
type int32Slice []int32
|
||||
|
||||
func (s int32Slice) Len() int { return len(s) }
|
||||
func (s int32Slice) Less(i, j int) bool { return s[i] < s[j] }
|
||||
func (s int32Slice) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
|
||||
|
||||
// writeExtensions writes all the extensions in pv.
|
||||
// pv is assumed to be a pointer to a protocol message struct that is extendable.
|
||||
func writeExtensions(w *textWriter, pv reflect.Value) error {
|
||||
emap := extensionMaps[pv.Type().Elem()]
|
||||
ep := pv.Interface().(extendableProto)
|
||||
|
||||
// Order the extensions by ID.
|
||||
// This isn't strictly necessary, but it will give us
|
||||
// canonical output, which will also make testing easier.
|
||||
m := ep.ExtensionMap()
|
||||
ids := make([]int32, 0, len(m))
|
||||
for id := range m {
|
||||
ids = append(ids, id)
|
||||
}
|
||||
sort.Sort(int32Slice(ids))
|
||||
|
||||
for _, extNum := range ids {
|
||||
ext := m[extNum]
|
||||
var desc *ExtensionDesc
|
||||
if emap != nil {
|
||||
desc = emap[extNum]
|
||||
}
|
||||
if desc == nil {
|
||||
// Unknown extension.
|
||||
if err := writeUnknownStruct(w, ext.enc); err != nil {
|
||||
return err
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
pb, err := GetExtension(ep, desc)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed getting extension: %v", err)
|
||||
}
|
||||
|
||||
// Repeated extensions will appear as a slice.
|
||||
if !desc.repeated() {
|
||||
if err := writeExtension(w, desc.Name, pb); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
v := reflect.ValueOf(pb)
|
||||
for i := 0; i < v.Len(); i++ {
|
||||
if err := writeExtension(w, desc.Name, v.Index(i).Interface()); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func writeExtension(w *textWriter, name string, pb interface{}) error {
|
||||
if _, err := fmt.Fprintf(w, "[%s]:", name); err != nil {
|
||||
return err
|
||||
}
|
||||
if !w.compact {
|
||||
if err := w.WriteByte(' '); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if err := writeAny(w, reflect.ValueOf(pb), nil); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := w.WriteByte('\n'); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (w *textWriter) writeIndent() {
|
||||
if !w.complete {
|
||||
return
|
||||
}
|
||||
remain := w.ind * 2
|
||||
for remain > 0 {
|
||||
n := remain
|
||||
if n > len(spaces) {
|
||||
n = len(spaces)
|
||||
}
|
||||
w.w.Write(spaces[:n])
|
||||
remain -= n
|
||||
}
|
||||
w.complete = false
|
||||
}
|
||||
|
||||
func marshalText(w io.Writer, pb Message, compact bool) error {
|
||||
val := reflect.ValueOf(pb)
|
||||
if pb == nil || val.IsNil() {
|
||||
w.Write([]byte("<nil>"))
|
||||
return nil
|
||||
}
|
||||
var bw *bufio.Writer
|
||||
ww, ok := w.(writer)
|
||||
if !ok {
|
||||
bw = bufio.NewWriter(w)
|
||||
ww = bw
|
||||
}
|
||||
aw := &textWriter{
|
||||
w: ww,
|
||||
complete: true,
|
||||
compact: compact,
|
||||
}
|
||||
|
||||
if tm, ok := pb.(encoding.TextMarshaler); ok {
|
||||
text, err := tm.MarshalText()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if _, err = aw.Write(text); err != nil {
|
||||
return err
|
||||
}
|
||||
if bw != nil {
|
||||
return bw.Flush()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
// Dereference the received pointer so we don't have outer < and >.
|
||||
v := reflect.Indirect(val)
|
||||
if err := writeStruct(aw, v); err != nil {
|
||||
return err
|
||||
}
|
||||
if bw != nil {
|
||||
return bw.Flush()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// MarshalText writes a given protocol buffer in text format.
// The only errors returned are from w.
func MarshalText(w io.Writer, pb Message) error {
	return marshalText(w, pb, false)
}

// MarshalTextString is the same as MarshalText, but returns the string directly.
func MarshalTextString(pb Message) string {
	var buf bytes.Buffer
	marshalText(&buf, pb, false)
	return buf.String()
}

// CompactText writes a given protocol buffer in compact text format (one line).
func CompactText(w io.Writer, pb Message) error { return marshalText(w, pb, true) }

// CompactTextString is the same as CompactText, but returns the string directly.
func CompactTextString(pb Message) string {
	var buf bytes.Buffer
	marshalText(&buf, pb, true)
	return buf.String()
}
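For orientation, a minimal usage sketch of the four public helpers above, assuming the usual proto import and a hypothetical generated message type examplepb.Foo with fields Name and Id; the exact output spacing is illustrative only.

// msg uses a hypothetical generated type; proto.String and proto.Int32 are
// the standard pointer helpers from this package.
msg := &examplepb.Foo{Name: proto.String("hello"), Id: proto.Int32(7)}

fmt.Print(proto.MarshalTextString(msg))   // multi-line form: name: "hello" / id: 7
fmt.Println(proto.CompactTextString(msg)) // one-line form: name:"hello" id:7

// MarshalText and CompactText write to any io.Writer instead of returning a string.
_ = proto.MarshalText(os.Stdout, msg)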
|
||||
798
Godeps/_workspace/src/github.com/golang/protobuf/proto/text_parser.go
generated
vendored
Normal file
@@ -0,0 +1,798 @@
|
||||
// Go support for Protocol Buffers - Google's data interchange format
|
||||
//
|
||||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// https://github.com/golang/protobuf
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
package proto
|
||||
|
||||
// Functions for parsing the Text protocol buffer format.
|
||||
// TODO: message sets.
|
||||
|
||||
import (
|
||||
"encoding"
|
||||
"errors"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
type ParseError struct {
|
||||
Message string
|
||||
Line int // 1-based line number
|
||||
Offset int // 0-based byte offset from start of input
|
||||
}
|
||||
|
||||
func (p *ParseError) Error() string {
|
||||
if p.Line == 1 {
|
||||
// show offset only for first line
|
||||
return fmt.Sprintf("line 1.%d: %v", p.Offset, p.Message)
|
||||
}
|
||||
return fmt.Sprintf("line %d: %v", p.Line, p.Message)
|
||||
}
|
||||
|
||||
type token struct {
|
||||
value string
|
||||
err *ParseError
|
||||
line int // line number
|
||||
offset int // byte number from start of input, not start of line
|
||||
unquoted string // the unquoted version of value, if it was a quoted string
|
||||
}
|
||||
|
||||
func (t *token) String() string {
|
||||
if t.err == nil {
|
||||
return fmt.Sprintf("%q (line=%d, offset=%d)", t.value, t.line, t.offset)
|
||||
}
|
||||
return fmt.Sprintf("parse error: %v", t.err)
|
||||
}
|
||||
|
||||
type textParser struct {
|
||||
s string // remaining input
|
||||
done bool // whether the parsing is finished (success or error)
|
||||
backed bool // whether back() was called
|
||||
offset, line int
|
||||
cur token
|
||||
}
|
||||
|
||||
func newTextParser(s string) *textParser {
|
||||
p := new(textParser)
|
||||
p.s = s
|
||||
p.line = 1
|
||||
p.cur.line = 1
|
||||
return p
|
||||
}
|
||||
|
||||
func (p *textParser) errorf(format string, a ...interface{}) *ParseError {
|
||||
pe := &ParseError{fmt.Sprintf(format, a...), p.cur.line, p.cur.offset}
|
||||
p.cur.err = pe
|
||||
p.done = true
|
||||
return pe
|
||||
}
|
||||
|
||||
// Numbers and identifiers are matched by [-+._A-Za-z0-9]
|
||||
func isIdentOrNumberChar(c byte) bool {
|
||||
switch {
|
||||
case 'A' <= c && c <= 'Z', 'a' <= c && c <= 'z':
|
||||
return true
|
||||
case '0' <= c && c <= '9':
|
||||
return true
|
||||
}
|
||||
switch c {
|
||||
case '-', '+', '.', '_':
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func isWhitespace(c byte) bool {
|
||||
switch c {
|
||||
case ' ', '\t', '\n', '\r':
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (p *textParser) skipWhitespace() {
|
||||
i := 0
|
||||
for i < len(p.s) && (isWhitespace(p.s[i]) || p.s[i] == '#') {
|
||||
if p.s[i] == '#' {
|
||||
// comment; skip to end of line or input
|
||||
for i < len(p.s) && p.s[i] != '\n' {
|
||||
i++
|
||||
}
|
||||
if i == len(p.s) {
|
||||
break
|
||||
}
|
||||
}
|
||||
if p.s[i] == '\n' {
|
||||
p.line++
|
||||
}
|
||||
i++
|
||||
}
|
||||
p.offset += i
|
||||
p.s = p.s[i:len(p.s)]
|
||||
if len(p.s) == 0 {
|
||||
p.done = true
|
||||
}
|
||||
}
|
||||
|
||||
func (p *textParser) advance() {
|
||||
// Skip whitespace
|
||||
p.skipWhitespace()
|
||||
if p.done {
|
||||
return
|
||||
}
|
||||
|
||||
// Start of non-whitespace
|
||||
p.cur.err = nil
|
||||
p.cur.offset, p.cur.line = p.offset, p.line
|
||||
p.cur.unquoted = ""
|
||||
switch p.s[0] {
|
||||
case '<', '>', '{', '}', ':', '[', ']', ';', ',':
|
||||
// Single symbol
|
||||
p.cur.value, p.s = p.s[0:1], p.s[1:len(p.s)]
|
||||
case '"', '\'':
|
||||
// Quoted string
|
||||
i := 1
|
||||
for i < len(p.s) && p.s[i] != p.s[0] && p.s[i] != '\n' {
|
||||
if p.s[i] == '\\' && i+1 < len(p.s) {
|
||||
// skip escaped char
|
||||
i++
|
||||
}
|
||||
i++
|
||||
}
|
||||
if i >= len(p.s) || p.s[i] != p.s[0] {
|
||||
p.errorf("unmatched quote")
|
||||
return
|
||||
}
|
||||
unq, err := unquoteC(p.s[1:i], rune(p.s[0]))
|
||||
if err != nil {
|
||||
p.errorf("invalid quoted string %s: %v", p.s[0:i+1], err)
|
||||
return
|
||||
}
|
||||
p.cur.value, p.s = p.s[0:i+1], p.s[i+1:len(p.s)]
|
||||
p.cur.unquoted = unq
|
||||
default:
|
||||
i := 0
|
||||
for i < len(p.s) && isIdentOrNumberChar(p.s[i]) {
|
||||
i++
|
||||
}
|
||||
if i == 0 {
|
||||
p.errorf("unexpected byte %#x", p.s[0])
|
||||
return
|
||||
}
|
||||
p.cur.value, p.s = p.s[0:i], p.s[i:len(p.s)]
|
||||
}
|
||||
p.offset += len(p.cur.value)
|
||||
}
|
||||
|
||||
var (
|
||||
errBadUTF8 = errors.New("proto: bad UTF-8")
|
||||
errBadHex = errors.New("proto: bad hexadecimal")
|
||||
)
|
||||
|
||||
func unquoteC(s string, quote rune) (string, error) {
|
||||
// This is based on C++'s tokenizer.cc.
|
||||
// Despite its name, this is *not* parsing C syntax.
|
||||
// For instance, "\0" is an invalid quoted string.
|
||||
|
||||
// Avoid allocation in trivial cases.
|
||||
simple := true
|
||||
for _, r := range s {
|
||||
if r == '\\' || r == quote {
|
||||
simple = false
|
||||
break
|
||||
}
|
||||
}
|
||||
if simple {
|
||||
return s, nil
|
||||
}
|
||||
|
||||
buf := make([]byte, 0, 3*len(s)/2)
|
||||
for len(s) > 0 {
|
||||
r, n := utf8.DecodeRuneInString(s)
|
||||
if r == utf8.RuneError && n == 1 {
|
||||
return "", errBadUTF8
|
||||
}
|
||||
s = s[n:]
|
||||
if r != '\\' {
|
||||
if r < utf8.RuneSelf {
|
||||
buf = append(buf, byte(r))
|
||||
} else {
|
||||
buf = append(buf, string(r)...)
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
ch, tail, err := unescape(s)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
buf = append(buf, ch...)
|
||||
s = tail
|
||||
}
|
||||
return string(buf), nil
|
||||
}
|
||||
|
||||
func unescape(s string) (ch string, tail string, err error) {
|
||||
r, n := utf8.DecodeRuneInString(s)
|
||||
if r == utf8.RuneError && n == 1 {
|
||||
return "", "", errBadUTF8
|
||||
}
|
||||
s = s[n:]
|
||||
switch r {
|
||||
case 'a':
|
||||
return "\a", s, nil
|
||||
case 'b':
|
||||
return "\b", s, nil
|
||||
case 'f':
|
||||
return "\f", s, nil
|
||||
case 'n':
|
||||
return "\n", s, nil
|
||||
case 'r':
|
||||
return "\r", s, nil
|
||||
case 't':
|
||||
return "\t", s, nil
|
||||
case 'v':
|
||||
return "\v", s, nil
|
||||
case '?':
|
||||
return "?", s, nil // trigraph workaround
|
||||
case '\'', '"', '\\':
|
||||
return string(r), s, nil
|
||||
case '0', '1', '2', '3', '4', '5', '6', '7', 'x', 'X':
|
||||
if len(s) < 2 {
|
||||
return "", "", fmt.Errorf(`\%c requires 2 following digits`, r)
|
||||
}
|
||||
base := 8
|
||||
ss := s[:2]
|
||||
s = s[2:]
|
||||
if r == 'x' || r == 'X' {
|
||||
base = 16
|
||||
} else {
|
||||
ss = string(r) + ss
|
||||
}
|
||||
i, err := strconv.ParseUint(ss, base, 8)
|
||||
if err != nil {
|
||||
return "", "", err
|
||||
}
|
||||
return string([]byte{byte(i)}), s, nil
|
||||
case 'u', 'U':
|
||||
n := 4
|
||||
if r == 'U' {
|
||||
n = 8
|
||||
}
|
||||
if len(s) < n {
|
||||
return "", "", fmt.Errorf(`\%c requires %d digits`, r, n)
|
||||
}
|
||||
|
||||
bs := make([]byte, n/2)
|
||||
for i := 0; i < n; i += 2 {
|
||||
a, ok1 := unhex(s[i])
|
||||
b, ok2 := unhex(s[i+1])
|
||||
if !ok1 || !ok2 {
|
||||
return "", "", errBadHex
|
||||
}
|
||||
bs[i/2] = a<<4 | b
|
||||
}
|
||||
s = s[n:]
|
||||
return string(bs), s, nil
|
||||
}
|
||||
return "", "", fmt.Errorf(`unknown escape \%c`, r)
|
||||
}
|
||||
|
||||
// Adapted from src/pkg/strconv/quote.go.
|
||||
func unhex(b byte) (v byte, ok bool) {
|
||||
switch {
|
||||
case '0' <= b && b <= '9':
|
||||
return b - '0', true
|
||||
case 'a' <= b && b <= 'f':
|
||||
return b - 'a' + 10, true
|
||||
case 'A' <= b && b <= 'F':
|
||||
return b - 'A' + 10, true
|
||||
}
|
||||
return 0, false
|
||||
}
|
||||
|
||||
// Back off the parser by one token. Can only be done between calls to next().
|
||||
// It makes the next advance() a no-op.
|
||||
func (p *textParser) back() { p.backed = true }
|
||||
|
||||
// Advances the parser and returns the new current token.
|
||||
func (p *textParser) next() *token {
|
||||
if p.backed || p.done {
|
||||
p.backed = false
|
||||
return &p.cur
|
||||
}
|
||||
p.advance()
|
||||
if p.done {
|
||||
p.cur.value = ""
|
||||
} else if len(p.cur.value) > 0 && p.cur.value[0] == '"' {
|
||||
// Look for multiple quoted strings separated by whitespace,
|
||||
// and concatenate them.
|
||||
cat := p.cur
|
||||
for {
|
||||
p.skipWhitespace()
|
||||
if p.done || p.s[0] != '"' {
|
||||
break
|
||||
}
|
||||
p.advance()
|
||||
if p.cur.err != nil {
|
||||
return &p.cur
|
||||
}
|
||||
cat.value += " " + p.cur.value
|
||||
cat.unquoted += p.cur.unquoted
|
||||
}
|
||||
p.done = false // parser may have seen EOF, but we want to return cat
|
||||
p.cur = cat
|
||||
}
|
||||
return &p.cur
|
||||
}
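One consequence of the concatenation above, matching the C++ scanner: adjacent quoted literals separated only by whitespace merge into a single string value. A hedged example, assuming a hypothetical message type with a string field name:

// Both inputs set name to "helloworld"; the whitespace between literals is ignored.
_ = proto.UnmarshalText(`name: "hello" "world"`, msg)
_ = proto.UnmarshalText(`name: "helloworld"`, msg)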
|
||||
|
||||
func (p *textParser) consumeToken(s string) error {
|
||||
tok := p.next()
|
||||
if tok.err != nil {
|
||||
return tok.err
|
||||
}
|
||||
if tok.value != s {
|
||||
p.back()
|
||||
return p.errorf("expected %q, found %q", s, tok.value)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Return a RequiredNotSetError indicating which required field was not set.
|
||||
func (p *textParser) missingRequiredFieldError(sv reflect.Value) *RequiredNotSetError {
|
||||
st := sv.Type()
|
||||
sprops := GetProperties(st)
|
||||
for i := 0; i < st.NumField(); i++ {
|
||||
if !isNil(sv.Field(i)) {
|
||||
continue
|
||||
}
|
||||
|
||||
props := sprops.Prop[i]
|
||||
if props.Required {
|
||||
return &RequiredNotSetError{fmt.Sprintf("%v.%v", st, props.OrigName)}
|
||||
}
|
||||
}
|
||||
return &RequiredNotSetError{fmt.Sprintf("%v.<unknown field name>", st)} // should not happen
|
||||
}
|
||||
|
||||
// Returns the index in the struct for the named field, as well as the parsed tag properties.
|
||||
func structFieldByName(sprops *StructProperties, name string) (int, *Properties, bool) {
|
||||
i, ok := sprops.decoderOrigNames[name]
|
||||
if ok {
|
||||
return i, sprops.Prop[i], true
|
||||
}
|
||||
return -1, nil, false
|
||||
}
|
||||
|
||||
// Consume a ':' from the input stream (if the next token is a colon),
|
||||
// returning an error if a colon is needed but not present.
|
||||
func (p *textParser) checkForColon(props *Properties, typ reflect.Type) *ParseError {
|
||||
tok := p.next()
|
||||
if tok.err != nil {
|
||||
return tok.err
|
||||
}
|
||||
if tok.value != ":" {
|
||||
// Colon is optional when the field is a group or message.
|
||||
needColon := true
|
||||
switch props.Wire {
|
||||
case "group":
|
||||
needColon = false
|
||||
case "bytes":
|
||||
// A "bytes" field is either a message, a string, or a repeated field;
|
||||
// those three become *T, *string and []T respectively, so we can check for
|
||||
// this field being a pointer to a non-string.
|
||||
if typ.Kind() == reflect.Ptr {
|
||||
// *T or *string
|
||||
if typ.Elem().Kind() == reflect.String {
|
||||
break
|
||||
}
|
||||
} else if typ.Kind() == reflect.Slice {
|
||||
// []T or []*T
|
||||
if typ.Elem().Kind() != reflect.Ptr {
|
||||
break
|
||||
}
|
||||
} else if typ.Kind() == reflect.String {
|
||||
// The proto3 exception is for a string field,
|
||||
// which requires a colon.
|
||||
break
|
||||
}
|
||||
needColon = false
|
||||
}
|
||||
if needColon {
|
||||
return p.errorf("expected ':', found %q", tok.value)
|
||||
}
|
||||
p.back()
|
||||
}
|
||||
return nil
|
||||
}
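The practical effect of the colon rule above: scalar and string fields are written as name: value, while message- and group-typed fields may omit the colon before the opening brace. A hedged illustration, assuming a hypothetical message with a scalar field id and a message-typed field inner:

// Accepted: the colon is optional before a nested message...
_ = proto.UnmarshalText(`inner { id: 1 }`, msg)
_ = proto.UnmarshalText(`inner: { id: 1 }`, msg)
// ...but required for a scalar field.
_ = proto.UnmarshalText(`id: 1`, msg)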
|
||||
|
||||
func (p *textParser) readStruct(sv reflect.Value, terminator string) error {
|
||||
st := sv.Type()
|
||||
sprops := GetProperties(st)
|
||||
reqCount := sprops.reqCount
|
||||
var reqFieldErr error
|
||||
fieldSet := make(map[string]bool)
|
||||
// A struct is a sequence of "name: value", terminated by one of
|
||||
// '>' or '}', or the end of the input. A name may also be
|
||||
// "[extension]".
|
||||
for {
|
||||
tok := p.next()
|
||||
if tok.err != nil {
|
||||
return tok.err
|
||||
}
|
||||
if tok.value == terminator {
|
||||
break
|
||||
}
|
||||
if tok.value == "[" {
|
||||
// Looks like an extension.
|
||||
//
|
||||
// TODO: Check whether we need to handle
|
||||
// namespace rooted names (e.g. ".something.Foo").
|
||||
tok = p.next()
|
||||
if tok.err != nil {
|
||||
return tok.err
|
||||
}
|
||||
var desc *ExtensionDesc
|
||||
// This could be faster, but it's functional.
|
||||
// TODO: Do something smarter than a linear scan.
|
||||
for _, d := range RegisteredExtensions(reflect.New(st).Interface().(Message)) {
|
||||
if d.Name == tok.value {
|
||||
desc = d
|
||||
break
|
||||
}
|
||||
}
|
||||
if desc == nil {
|
||||
return p.errorf("unrecognized extension %q", tok.value)
|
||||
}
|
||||
// Check the extension terminator.
|
||||
tok = p.next()
|
||||
if tok.err != nil {
|
||||
return tok.err
|
||||
}
|
||||
if tok.value != "]" {
|
||||
return p.errorf("unrecognized extension terminator %q", tok.value)
|
||||
}
|
||||
|
||||
props := &Properties{}
|
||||
props.Parse(desc.Tag)
|
||||
|
||||
typ := reflect.TypeOf(desc.ExtensionType)
|
||||
if err := p.checkForColon(props, typ); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
rep := desc.repeated()
|
||||
|
||||
// Read the extension structure, and set it in
|
||||
// the value we're constructing.
|
||||
var ext reflect.Value
|
||||
if !rep {
|
||||
ext = reflect.New(typ).Elem()
|
||||
} else {
|
||||
ext = reflect.New(typ.Elem()).Elem()
|
||||
}
|
||||
if err := p.readAny(ext, props); err != nil {
|
||||
if _, ok := err.(*RequiredNotSetError); !ok {
|
||||
return err
|
||||
}
|
||||
reqFieldErr = err
|
||||
}
|
||||
ep := sv.Addr().Interface().(extendableProto)
|
||||
if !rep {
|
||||
SetExtension(ep, desc, ext.Interface())
|
||||
} else {
|
||||
old, err := GetExtension(ep, desc)
|
||||
var sl reflect.Value
|
||||
if err == nil {
|
||||
sl = reflect.ValueOf(old) // existing slice
|
||||
} else {
|
||||
sl = reflect.MakeSlice(typ, 0, 1)
|
||||
}
|
||||
sl = reflect.Append(sl, ext)
|
||||
SetExtension(ep, desc, sl.Interface())
|
||||
}
|
||||
if err := p.consumeOptionalSeparator(); err != nil {
|
||||
return err
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// This is a normal, non-extension field.
|
||||
name := tok.value
|
||||
var dst reflect.Value
|
||||
fi, props, ok := structFieldByName(sprops, name)
|
||||
if ok {
|
||||
dst = sv.Field(fi)
|
||||
} else if oop, ok := sprops.OneofTypes[name]; ok {
|
||||
// It is a oneof.
|
||||
props = oop.Prop
|
||||
nv := reflect.New(oop.Type.Elem())
|
||||
dst = nv.Elem().Field(0)
|
||||
sv.Field(oop.Field).Set(nv)
|
||||
}
|
||||
if !dst.IsValid() {
|
||||
return p.errorf("unknown field name %q in %v", name, st)
|
||||
}
|
||||
|
||||
if dst.Kind() == reflect.Map {
|
||||
// Consume any colon.
|
||||
if err := p.checkForColon(props, dst.Type()); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Construct the map if it doesn't already exist.
|
||||
if dst.IsNil() {
|
||||
dst.Set(reflect.MakeMap(dst.Type()))
|
||||
}
|
||||
key := reflect.New(dst.Type().Key()).Elem()
|
||||
val := reflect.New(dst.Type().Elem()).Elem()
|
||||
|
||||
// The map entry should be this sequence of tokens:
|
||||
// < key : KEY value : VALUE >
|
||||
// Technically the "key" and "value" could come in any order,
|
||||
// but in practice they won't.
|
||||
|
||||
tok := p.next()
|
||||
var terminator string
|
||||
switch tok.value {
|
||||
case "<":
|
||||
terminator = ">"
|
||||
case "{":
|
||||
terminator = "}"
|
||||
default:
|
||||
return p.errorf("expected '{' or '<', found %q", tok.value)
|
||||
}
|
||||
if err := p.consumeToken("key"); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := p.consumeToken(":"); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := p.readAny(key, props.mkeyprop); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := p.consumeOptionalSeparator(); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := p.consumeToken("value"); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := p.checkForColon(props.mvalprop, dst.Type().Elem()); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := p.readAny(val, props.mvalprop); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := p.consumeOptionalSeparator(); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := p.consumeToken(terminator); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
dst.SetMapIndex(key, val)
|
||||
continue
|
||||
}
|
||||
|
||||
// Check that it's not already set if it's not a repeated field.
|
||||
if !props.Repeated && fieldSet[name] {
|
||||
return p.errorf("non-repeated field %q was repeated", name)
|
||||
}
|
||||
|
||||
if err := p.checkForColon(props, dst.Type()); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Parse into the field.
|
||||
fieldSet[name] = true
|
||||
if err := p.readAny(dst, props); err != nil {
|
||||
if _, ok := err.(*RequiredNotSetError); !ok {
|
||||
return err
|
||||
}
|
||||
reqFieldErr = err
|
||||
} else if props.Required {
|
||||
reqCount--
|
||||
}
|
||||
|
||||
if err := p.consumeOptionalSeparator(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if reqCount > 0 {
|
||||
return p.missingRequiredFieldError(sv)
|
||||
}
|
||||
return reqFieldErr
|
||||
}
|
||||
|
||||
// consumeOptionalSeparator consumes an optional semicolon or comma.
// It is used in readStruct to provide backward compatibility.
func (p *textParser) consumeOptionalSeparator() error {
	tok := p.next()
	if tok.err != nil {
		return tok.err
	}
	if tok.value != ";" && tok.value != "," {
		p.back()
	}
	return nil
}
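In other words, a ';' or ',' after a field value is tolerated but never required, so the following inputs parse identically (again using a hypothetical generated type with name and id fields):

msg := new(examplepb.Foo) // hypothetical generated message type
_ = proto.UnmarshalText(`name: "x" id: 7`, msg)
_ = proto.UnmarshalText(`name: "x"; id: 7,`, msg)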
|
||||
|
||||
func (p *textParser) readAny(v reflect.Value, props *Properties) error {
|
||||
tok := p.next()
|
||||
if tok.err != nil {
|
||||
return tok.err
|
||||
}
|
||||
if tok.value == "" {
|
||||
return p.errorf("unexpected EOF")
|
||||
}
|
||||
|
||||
switch fv := v; fv.Kind() {
|
||||
case reflect.Slice:
|
||||
at := v.Type()
|
||||
if at.Elem().Kind() == reflect.Uint8 {
|
||||
// Special case for []byte
|
||||
if tok.value[0] != '"' && tok.value[0] != '\'' {
|
||||
// Deliberately written out here, as the error after
|
||||
// this switch statement would write "invalid []byte: ...",
|
||||
// which is not as user-friendly.
|
||||
return p.errorf("invalid string: %v", tok.value)
|
||||
}
|
||||
bytes := []byte(tok.unquoted)
|
||||
fv.Set(reflect.ValueOf(bytes))
|
||||
return nil
|
||||
}
|
||||
// Repeated field.
|
||||
if tok.value == "[" {
|
||||
// Repeated field with list notation, like [1,2,3].
|
||||
for {
|
||||
fv.Set(reflect.Append(fv, reflect.New(at.Elem()).Elem()))
|
||||
err := p.readAny(fv.Index(fv.Len()-1), props)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
tok := p.next()
|
||||
if tok.err != nil {
|
||||
return tok.err
|
||||
}
|
||||
if tok.value == "]" {
|
||||
break
|
||||
}
|
||||
if tok.value != "," {
|
||||
return p.errorf("Expected ']' or ',' found %q", tok.value)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
// One value of the repeated field.
|
||||
p.back()
|
||||
fv.Set(reflect.Append(fv, reflect.New(at.Elem()).Elem()))
|
||||
return p.readAny(fv.Index(fv.Len()-1), props)
|
||||
case reflect.Bool:
|
||||
// Either "true", "false", 1 or 0.
|
||||
switch tok.value {
|
||||
case "true", "1":
|
||||
fv.SetBool(true)
|
||||
return nil
|
||||
case "false", "0":
|
||||
fv.SetBool(false)
|
||||
return nil
|
||||
}
|
||||
case reflect.Float32, reflect.Float64:
|
||||
v := tok.value
|
||||
// Ignore 'f' for compatibility with output generated by C++, but don't
|
||||
// remove 'f' when the value is "-inf" or "inf".
|
||||
if strings.HasSuffix(v, "f") && tok.value != "-inf" && tok.value != "inf" {
|
||||
v = v[:len(v)-1]
|
||||
}
|
||||
if f, err := strconv.ParseFloat(v, fv.Type().Bits()); err == nil {
|
||||
fv.SetFloat(f)
|
||||
return nil
|
||||
}
|
||||
case reflect.Int32:
|
||||
if x, err := strconv.ParseInt(tok.value, 0, 32); err == nil {
|
||||
fv.SetInt(x)
|
||||
return nil
|
||||
}
|
||||
|
||||
if len(props.Enum) == 0 {
|
||||
break
|
||||
}
|
||||
m, ok := enumValueMaps[props.Enum]
|
||||
if !ok {
|
||||
break
|
||||
}
|
||||
x, ok := m[tok.value]
|
||||
if !ok {
|
||||
break
|
||||
}
|
||||
fv.SetInt(int64(x))
|
||||
return nil
|
||||
case reflect.Int64:
|
||||
if x, err := strconv.ParseInt(tok.value, 0, 64); err == nil {
|
||||
fv.SetInt(x)
|
||||
return nil
|
||||
}
|
||||
|
||||
case reflect.Ptr:
|
||||
// A basic field (indirected through pointer), or a repeated message/group
|
||||
p.back()
|
||||
fv.Set(reflect.New(fv.Type().Elem()))
|
||||
return p.readAny(fv.Elem(), props)
|
||||
case reflect.String:
|
||||
if tok.value[0] == '"' || tok.value[0] == '\'' {
|
||||
fv.SetString(tok.unquoted)
|
||||
return nil
|
||||
}
|
||||
case reflect.Struct:
|
||||
var terminator string
|
||||
switch tok.value {
|
||||
case "{":
|
||||
terminator = "}"
|
||||
case "<":
|
||||
terminator = ">"
|
||||
default:
|
||||
return p.errorf("expected '{' or '<', found %q", tok.value)
|
||||
}
|
||||
// TODO: Handle nested messages which implement encoding.TextUnmarshaler.
|
||||
return p.readStruct(fv, terminator)
|
||||
case reflect.Uint32:
|
||||
if x, err := strconv.ParseUint(tok.value, 0, 32); err == nil {
|
||||
fv.SetUint(uint64(x))
|
||||
return nil
|
||||
}
|
||||
case reflect.Uint64:
|
||||
if x, err := strconv.ParseUint(tok.value, 0, 64); err == nil {
|
||||
fv.SetUint(x)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
return p.errorf("invalid %v: %v", v.Type(), tok.value)
|
||||
}
|
||||
|
||||
// UnmarshalText reads a protocol buffer in Text format. UnmarshalText resets pb
// before starting to unmarshal, so any existing data in pb is always removed.
// If a required field is not set and no other error occurs,
// UnmarshalText returns *RequiredNotSetError.
func UnmarshalText(s string, pb Message) error {
	if um, ok := pb.(encoding.TextUnmarshaler); ok {
		err := um.UnmarshalText([]byte(s))
		return err
	}
	pb.Reset()
	v := reflect.ValueOf(pb)
	if pe := newTextParser(s).readStruct(v.Elem(), ""); pe != nil {
		return pe
	}
	return nil
}
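A minimal round-trip sketch of the text-format entry points, again using the hypothetical examplepb.Foo and assuming the usual imports (proto, log):

in := &examplepb.Foo{Name: proto.String("hello")}
text := proto.MarshalTextString(in)

out := new(examplepb.Foo)
if err := proto.UnmarshalText(text, out); err != nil {
	// A *proto.RequiredNotSetError means the text omitted a required field;
	// any other error indicates malformed input.
	log.Fatal(err)
}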
|
||||
238
Godeps/_workspace/src/github.com/google/go-querystring/query/encode_test.go
generated
vendored
@@ -1,238 +0,0 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package query
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/url"
|
||||
"reflect"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestValues_types(t *testing.T) {
|
||||
str := "string"
|
||||
strPtr := &str
|
||||
|
||||
tests := []struct {
|
||||
in interface{}
|
||||
want url.Values
|
||||
}{
|
||||
{
|
||||
// basic primitives
|
||||
struct {
|
||||
A string
|
||||
B int
|
||||
C uint
|
||||
D float32
|
||||
E bool
|
||||
}{},
|
||||
url.Values{
|
||||
"A": {""},
|
||||
"B": {"0"},
|
||||
"C": {"0"},
|
||||
"D": {"0"},
|
||||
"E": {"false"},
|
||||
},
|
||||
},
|
||||
{
|
||||
// pointers
|
||||
struct {
|
||||
A *string
|
||||
B *int
|
||||
C **string
|
||||
}{A: strPtr, C: &strPtr},
|
||||
url.Values{
|
||||
"A": {str},
|
||||
"B": {""},
|
||||
"C": {str},
|
||||
},
|
||||
},
|
||||
{
|
||||
// slices and arrays
|
||||
struct {
|
||||
A []string
|
||||
B []string `url:",comma"`
|
||||
C []string `url:",space"`
|
||||
D [2]string
|
||||
E [2]string `url:",comma"`
|
||||
F [2]string `url:",space"`
|
||||
G []*string `url:",space"`
|
||||
H []bool `url:",int,space"`
|
||||
}{
|
||||
A: []string{"a", "b"},
|
||||
B: []string{"a", "b"},
|
||||
C: []string{"a", "b"},
|
||||
D: [2]string{"a", "b"},
|
||||
E: [2]string{"a", "b"},
|
||||
F: [2]string{"a", "b"},
|
||||
G: []*string{&str, &str},
|
||||
H: []bool{true, false},
|
||||
},
|
||||
url.Values{
|
||||
"A": {"a", "b"},
|
||||
"B": {"a,b"},
|
||||
"C": {"a b"},
|
||||
"D": {"a", "b"},
|
||||
"E": {"a,b"},
|
||||
"F": {"a b"},
|
||||
"G": {"string string"},
|
||||
"H": {"1 0"},
|
||||
},
|
||||
},
|
||||
{
|
||||
// other types
|
||||
struct {
|
||||
A time.Time
|
||||
B time.Time `url:",unix"`
|
||||
C bool `url:",int"`
|
||||
D bool `url:",int"`
|
||||
}{
|
||||
A: time.Date(2000, 1, 1, 12, 34, 56, 0, time.UTC),
|
||||
B: time.Date(2000, 1, 1, 12, 34, 56, 0, time.UTC),
|
||||
C: true,
|
||||
D: false,
|
||||
},
|
||||
url.Values{
|
||||
"A": {"2000-01-01T12:34:56Z"},
|
||||
"B": {"946730096"},
|
||||
"C": {"1"},
|
||||
"D": {"0"},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for i, tt := range tests {
|
||||
v, err := Values(tt.in)
|
||||
if err != nil {
|
||||
t.Errorf("%d. Values(%q) returned error: %v", i, tt.in, err)
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(tt.want, v) {
|
||||
t.Errorf("%d. Values(%q) returned %v, want %v", i, tt.in, v, tt.want)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestValues_omitEmpty(t *testing.T) {
|
||||
str := ""
|
||||
s := struct {
|
||||
a string
|
||||
A string
|
||||
B string `url:",omitempty"`
|
||||
C string `url:"-"`
|
||||
D string `url:"omitempty"` // actually named omitempty, not an option
|
||||
E *string `url:",omitempty"`
|
||||
}{E: &str}
|
||||
|
||||
v, err := Values(s)
|
||||
if err != nil {
|
||||
t.Errorf("Values(%q) returned error: %v", s, err)
|
||||
}
|
||||
|
||||
want := url.Values{
|
||||
"A": {""},
|
||||
"omitempty": {""},
|
||||
"E": {""}, // E is included because the pointer is not empty, even though the string being pointed to is
|
||||
}
|
||||
if !reflect.DeepEqual(want, v) {
|
||||
t.Errorf("Values(%q) returned %v, want %v", s, v, want)
|
||||
}
|
||||
}
|
||||
|
||||
type A struct {
|
||||
B
|
||||
}
|
||||
|
||||
type B struct {
|
||||
C string
|
||||
}
|
||||
|
||||
type D struct {
|
||||
B
|
||||
C string
|
||||
}
|
||||
|
||||
func TestValues_embeddedStructs(t *testing.T) {
|
||||
tests := []struct {
|
||||
in interface{}
|
||||
want url.Values
|
||||
}{
|
||||
{
|
||||
A{B{C: "foo"}},
|
||||
url.Values{"C": {"foo"}},
|
||||
},
|
||||
{
|
||||
D{B: B{C: "bar"}, C: "foo"},
|
||||
url.Values{"C": {"foo", "bar"}},
|
||||
},
|
||||
}
|
||||
|
||||
for i, tt := range tests {
|
||||
v, err := Values(tt.in)
|
||||
if err != nil {
|
||||
t.Errorf("%d. Values(%q) returned error: %v", i, tt.in, err)
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(tt.want, v) {
|
||||
t.Errorf("%d. Values(%q) returned %v, want %v", i, tt.in, v, tt.want)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestValues_invalidInput(t *testing.T) {
|
||||
_, err := Values("")
|
||||
if err == nil {
|
||||
t.Errorf("expected Values() to return an error on invalid input")
|
||||
}
|
||||
}
|
||||
|
||||
type EncodedArgs []string
|
||||
|
||||
func (m EncodedArgs) EncodeValues(key string, v *url.Values) error {
|
||||
for i, arg := range m {
|
||||
v.Set(fmt.Sprintf("%s.%d", key, i), arg)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func TestValues_Marshaler(t *testing.T) {
|
||||
s := struct {
|
||||
Args EncodedArgs `url:"arg"`
|
||||
}{[]string{"a", "b", "c"}}
|
||||
v, err := Values(s)
|
||||
if err != nil {
|
||||
t.Errorf("Values(%q) returned error: %v", s, err)
|
||||
}
|
||||
|
||||
want := url.Values{
|
||||
"arg.0": {"a"},
|
||||
"arg.1": {"b"},
|
||||
"arg.2": {"c"},
|
||||
}
|
||||
if !reflect.DeepEqual(want, v) {
|
||||
t.Errorf("Values(%q) returned %v, want %v", s, v, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestTagParsing(t *testing.T) {
|
||||
name, opts := parseTag("field,foobar,foo")
|
||||
if name != "field" {
|
||||
t.Fatalf("name = %q, want field", name)
|
||||
}
|
||||
for _, tt := range []struct {
|
||||
opt string
|
||||
want bool
|
||||
}{
|
||||
{"foobar", true},
|
||||
{"foo", true},
|
||||
{"bar", false},
|
||||
{"field", false},
|
||||
} {
|
||||
if opts.Contains(tt.opt) != tt.want {
|
||||
t.Errorf("Contains(%q) = %v", tt.opt, !tt.want)
|
||||
}
|
||||
}
|
||||
}
|
||||
104
Godeps/_workspace/src/github.com/mitchellh/colorstring/colorstring_test.go
generated
vendored
@@ -1,104 +0,0 @@
|
||||
package colorstring
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestColor(t *testing.T) {
|
||||
cases := []struct {
|
||||
Input, Output string
|
||||
}{
|
||||
{
|
||||
Input: "foo",
|
||||
Output: "foo",
|
||||
},
|
||||
|
||||
{
|
||||
Input: "[blue]foo",
|
||||
Output: "\033[34mfoo\033[0m",
|
||||
},
|
||||
|
||||
{
|
||||
Input: "foo[blue]foo",
|
||||
Output: "foo\033[34mfoo\033[0m",
|
||||
},
|
||||
|
||||
{
|
||||
Input: "foo[what]foo",
|
||||
Output: "foo[what]foo",
|
||||
},
|
||||
{
|
||||
Input: "foo[_blue_]foo",
|
||||
Output: "foo\033[44mfoo\033[0m",
|
||||
},
|
||||
{
|
||||
Input: "foo[bold]foo",
|
||||
Output: "foo\033[1mfoo\033[0m",
|
||||
},
|
||||
{
|
||||
Input: "[blue]foo[bold]bar",
|
||||
Output: "\033[34mfoo\033[1mbar\033[0m",
|
||||
},
|
||||
{
|
||||
Input: "[underline]foo[reset]bar",
|
||||
Output: "\033[4mfoo\033[0mbar\033[0m",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range cases {
|
||||
actual := Color(tc.Input)
|
||||
if actual != tc.Output {
|
||||
t.Errorf(
|
||||
"Input: %#v\n\nOutput: %#v\n\nExpected: %#v",
|
||||
tc.Input,
|
||||
actual,
|
||||
tc.Output)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestColorizeColor_disable(t *testing.T) {
|
||||
c := def
|
||||
c.Disable = true
|
||||
|
||||
cases := []struct {
|
||||
Input, Output string
|
||||
}{
|
||||
{
|
||||
"[blue]foo",
|
||||
"foo",
|
||||
},
|
||||
|
||||
{
|
||||
"[foo]bar",
|
||||
"[foo]bar",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range cases {
|
||||
actual := c.Color(tc.Input)
|
||||
if actual != tc.Output {
|
||||
t.Errorf(
|
||||
"Input: %#v\n\nOutput: %#v\n\nExpected: %#v",
|
||||
tc.Input,
|
||||
actual,
|
||||
tc.Output)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestColorizeColor_noReset(t *testing.T) {
|
||||
c := def
|
||||
c.Reset = false
|
||||
|
||||
input := "[blue]foo"
|
||||
output := "\033[34mfoo"
|
||||
actual := c.Color(input)
|
||||
if actual != output {
|
||||
t.Errorf(
|
||||
"Input: %#v\n\nOutput: %#v\n\nExpected: %#v",
|
||||
input,
|
||||
actual,
|
||||
output)
|
||||
}
|
||||
}
|
||||
91
Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/nodot/nodot_suite_test.go
generated
vendored
@@ -1,91 +0,0 @@
|
||||
package nodot_test
|
||||
|
||||
import (
|
||||
"github.com/onsi/ginkgo"
|
||||
"github.com/onsi/gomega"
|
||||
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestNodot(t *testing.T) {
|
||||
RegisterFailHandler(Fail)
|
||||
RunSpecs(t, "Nodot Suite")
|
||||
}
|
||||
|
||||
// Declarations for Ginkgo DSL
|
||||
type Done ginkgo.Done
|
||||
type Benchmarker ginkgo.Benchmarker
|
||||
|
||||
var GinkgoWriter = ginkgo.GinkgoWriter
|
||||
var GinkgoParallelNode = ginkgo.GinkgoParallelNode
|
||||
var GinkgoT = ginkgo.GinkgoT
|
||||
var CurrentGinkgoTestDescription = ginkgo.CurrentGinkgoTestDescription
|
||||
var RunSpecs = ginkgo.RunSpecs
|
||||
var RunSpecsWithDefaultAndCustomReporters = ginkgo.RunSpecsWithDefaultAndCustomReporters
|
||||
var RunSpecsWithCustomReporters = ginkgo.RunSpecsWithCustomReporters
|
||||
var Fail = ginkgo.Fail
|
||||
var GinkgoRecover = ginkgo.GinkgoRecover
|
||||
var Describe = ginkgo.Describe
|
||||
var FDescribe = ginkgo.FDescribe
|
||||
var PDescribe = ginkgo.PDescribe
|
||||
var XDescribe = ginkgo.XDescribe
|
||||
var Context = ginkgo.Context
|
||||
var FContext = ginkgo.FContext
|
||||
var PContext = ginkgo.PContext
|
||||
var XContext = ginkgo.XContext
|
||||
var It = ginkgo.It
|
||||
var FIt = ginkgo.FIt
|
||||
var PIt = ginkgo.PIt
|
||||
var XIt = ginkgo.XIt
|
||||
var Measure = ginkgo.Measure
|
||||
var FMeasure = ginkgo.FMeasure
|
||||
var PMeasure = ginkgo.PMeasure
|
||||
var XMeasure = ginkgo.XMeasure
|
||||
var BeforeSuite = ginkgo.BeforeSuite
|
||||
var AfterSuite = ginkgo.AfterSuite
|
||||
var SynchronizedBeforeSuite = ginkgo.SynchronizedBeforeSuite
|
||||
var SynchronizedAfterSuite = ginkgo.SynchronizedAfterSuite
|
||||
var BeforeEach = ginkgo.BeforeEach
|
||||
var JustBeforeEach = ginkgo.JustBeforeEach
|
||||
var AfterEach = ginkgo.AfterEach
|
||||
|
||||
// Declarations for Gomega DSL
|
||||
var RegisterFailHandler = gomega.RegisterFailHandler
|
||||
var RegisterTestingT = gomega.RegisterTestingT
|
||||
var InterceptGomegaFailures = gomega.InterceptGomegaFailures
|
||||
var Ω = gomega.Ω
|
||||
var Expect = gomega.Expect
|
||||
var ExpectWithOffset = gomega.ExpectWithOffset
|
||||
var Eventually = gomega.Eventually
|
||||
var EventuallyWithOffset = gomega.EventuallyWithOffset
|
||||
var Consistently = gomega.Consistently
|
||||
var ConsistentlyWithOffset = gomega.ConsistentlyWithOffset
|
||||
var SetDefaultEventuallyTimeout = gomega.SetDefaultEventuallyTimeout
|
||||
var SetDefaultEventuallyPollingInterval = gomega.SetDefaultEventuallyPollingInterval
|
||||
var SetDefaultConsistentlyDuration = gomega.SetDefaultConsistentlyDuration
|
||||
var SetDefaultConsistentlyPollingInterval = gomega.SetDefaultConsistentlyPollingInterval
|
||||
|
||||
// Declarations for Gomega Matchers
|
||||
var Equal = gomega.Equal
|
||||
var BeEquivalentTo = gomega.BeEquivalentTo
|
||||
var BeNil = gomega.BeNil
|
||||
var BeTrue = gomega.BeTrue
|
||||
var BeFalse = gomega.BeFalse
|
||||
var HaveOccurred = gomega.HaveOccurred
|
||||
var MatchError = gomega.MatchError
|
||||
var BeClosed = gomega.BeClosed
|
||||
var Receive = gomega.Receive
|
||||
var MatchRegexp = gomega.MatchRegexp
|
||||
var ContainSubstring = gomega.ContainSubstring
|
||||
var MatchJSON = gomega.MatchJSON
|
||||
var BeEmpty = gomega.BeEmpty
|
||||
var HaveLen = gomega.HaveLen
|
||||
var BeZero = gomega.BeZero
|
||||
var ContainElement = gomega.ContainElement
|
||||
var ConsistOf = gomega.ConsistOf
|
||||
var HaveKey = gomega.HaveKey
|
||||
var HaveKeyWithValue = gomega.HaveKeyWithValue
|
||||
var BeNumerically = gomega.BeNumerically
|
||||
var BeTemporally = gomega.BeTemporally
|
||||
var BeAssignableToTypeOf = gomega.BeAssignableToTypeOf
|
||||
var Panic = gomega.Panic
|
||||
81
Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/nodot/nodot_test.go
generated
vendored
@@ -1,81 +0,0 @@
|
||||
package nodot_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo/ginkgo/nodot"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var _ = Describe("ApplyNoDot", func() {
|
||||
var result string
|
||||
|
||||
apply := func(input string) string {
|
||||
output, err := ApplyNoDot([]byte(input))
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
return string(output)
|
||||
}
|
||||
|
||||
Context("when no declarations have been imported yet", func() {
|
||||
BeforeEach(func() {
|
||||
result = apply("")
|
||||
})
|
||||
|
||||
It("should add headings for the various declarations", func() {
|
||||
Ω(result).Should(ContainSubstring("// Declarations for Ginkgo DSL"))
|
||||
Ω(result).Should(ContainSubstring("// Declarations for Gomega DSL"))
|
||||
Ω(result).Should(ContainSubstring("// Declarations for Gomega Matchers"))
|
||||
})
|
||||
|
||||
It("should import Ginkgo's declarations", func() {
|
||||
Ω(result).Should(ContainSubstring("var It = ginkgo.It"))
|
||||
Ω(result).Should(ContainSubstring("var XDescribe = ginkgo.XDescribe"))
|
||||
})
|
||||
|
||||
It("should import Ginkgo's types", func() {
|
||||
Ω(result).Should(ContainSubstring("type Done ginkgo.Done"))
|
||||
Ω(result).Should(ContainSubstring("type Benchmarker ginkgo.Benchmarker"))
|
||||
Ω(strings.Count(result, "type ")).Should(Equal(2))
|
||||
})
|
||||
|
||||
It("should import Gomega's DSL and matchers", func() {
|
||||
Ω(result).Should(ContainSubstring("var Ω = gomega.Ω"))
|
||||
Ω(result).Should(ContainSubstring("var ContainSubstring = gomega.ContainSubstring"))
|
||||
Ω(result).Should(ContainSubstring("var Equal = gomega.Equal"))
|
||||
})
|
||||
|
||||
It("should not import blacklisted things", func() {
|
||||
Ω(result).ShouldNot(ContainSubstring("GINKGO_VERSION"))
|
||||
Ω(result).ShouldNot(ContainSubstring("GINKGO_PANIC"))
|
||||
Ω(result).ShouldNot(ContainSubstring("GOMEGA_VERSION"))
|
||||
})
|
||||
})
|
||||
|
||||
It("should be idempotent (module empty lines - go fmt can fix those for us)", func() {
|
||||
first := apply("")
|
||||
second := apply(first)
|
||||
first = strings.Trim(first, "\n")
|
||||
second = strings.Trim(second, "\n")
|
||||
Ω(first).Should(Equal(second))
|
||||
})
|
||||
|
||||
It("should not mess with other things in the input", func() {
|
||||
result = apply("var MyThing = SomethingThatsMine")
|
||||
Ω(result).Should(ContainSubstring("var MyThing = SomethingThatsMine"))
|
||||
})
|
||||
|
||||
Context("when the user has redefined a name", func() {
|
||||
It("should honor the redefinition", func() {
|
||||
result = apply(`
|
||||
var _ = gomega.Ω
|
||||
var When = ginkgo.It
|
||||
`)
|
||||
|
||||
Ω(result).Should(ContainSubstring("var _ = gomega.Ω"))
|
||||
Ω(result).ShouldNot(ContainSubstring("var Ω = gomega.Ω"))
|
||||
|
||||
Ω(result).Should(ContainSubstring("var When = ginkgo.It"))
|
||||
Ω(result).ShouldNot(ContainSubstring("var It = ginkgo.It"))
|
||||
|
||||
Ω(result).Should(ContainSubstring("var Context = ginkgo.Context"))
|
||||
})
|
||||
})
|
||||
})
|
||||
13
Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/testsuite/testsuite_suite_test.go
generated
vendored
@@ -1,13 +0,0 @@
|
||||
package testsuite_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestTestsuite(t *testing.T) {
|
||||
RegisterFailHandler(Fail)
|
||||
RunSpecs(t, "Testsuite Suite")
|
||||
}
|
||||
167
Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/testsuite/testsuite_test.go
generated
vendored
@@ -1,167 +0,0 @@
|
||||
package testsuite_test
|
||||
|
||||
import (
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/ginkgo/ginkgo/testsuite"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("TestSuite", func() {
|
||||
var tmpDir string
|
||||
var relTmpDir string
|
||||
|
||||
writeFile := func(folder string, filename string, content string, mode os.FileMode) {
|
||||
path := filepath.Join(tmpDir, folder)
|
||||
err := os.MkdirAll(path, 0700)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
path = filepath.Join(path, filename)
|
||||
ioutil.WriteFile(path, []byte(content), mode)
|
||||
}
|
||||
|
||||
BeforeEach(func() {
|
||||
var err error
|
||||
tmpDir, err = ioutil.TempDir("/tmp", "ginkgo")
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
cwd, err := os.Getwd()
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
relTmpDir, err = filepath.Rel(cwd, tmpDir)
|
||||
relTmpDir = "./" + relTmpDir
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
//go files in the root directory (no tests)
|
||||
writeFile("/", "main.go", "package main", 0666)
|
||||
|
||||
//non-go files in a nested directory
|
||||
writeFile("/redherring", "big_test.jpg", "package ginkgo", 0666)
|
||||
|
||||
//non-ginkgo tests in a nested directory
|
||||
writeFile("/professorplum", "professorplum_test.go", `import "testing"`, 0666)
|
||||
|
||||
//ginkgo tests in a nested directory
|
||||
writeFile("/colonelmustard", "colonelmustard_test.go", `import "github.com/onsi/ginkgo"`, 0666)
|
||||
|
||||
//ginkgo tests in a deeply nested directory
|
||||
writeFile("/colonelmustard/library", "library_test.go", `import "github.com/onsi/ginkgo"`, 0666)
|
||||
|
||||
//a precompiled ginkgo test
|
||||
writeFile("/precompiled-dir", "precompiled.test", `fake-binary-file`, 0777)
|
||||
writeFile("/precompiled-dir", "some-other-binary", `fake-binary-file`, 0777)
|
||||
writeFile("/precompiled-dir", "nonexecutable.test", `fake-binary-file`, 0666)
|
||||
})
|
||||
|
||||
AfterEach(func() {
|
||||
os.RemoveAll(tmpDir)
|
||||
})
|
||||
|
||||
Describe("Finding precompiled test suites", func() {
|
||||
Context("if pointed at an executable file that ends with .test", func() {
|
||||
It("should return a precompiled test suite", func() {
|
||||
suite, err := PrecompiledTestSuite(filepath.Join(tmpDir, "precompiled-dir", "precompiled.test"))
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(suite).Should(Equal(TestSuite{
|
||||
Path: relTmpDir + "/precompiled-dir",
|
||||
PackageName: "precompiled",
|
||||
IsGinkgo: true,
|
||||
Precompiled: true,
|
||||
}))
|
||||
})
|
||||
})
|
||||
|
||||
Context("if pointed at a directory", func() {
|
||||
It("should error", func() {
|
||||
suite, err := PrecompiledTestSuite(filepath.Join(tmpDir, "precompiled-dir"))
|
||||
Ω(suite).Should(BeZero())
|
||||
Ω(err).Should(HaveOccurred())
|
||||
})
|
||||
})
|
||||
|
||||
Context("if pointed at an executable that doesn't have .test", func() {
|
||||
It("should error", func() {
|
||||
suite, err := PrecompiledTestSuite(filepath.Join(tmpDir, "precompiled-dir", "some-other-binary"))
|
||||
Ω(suite).Should(BeZero())
|
||||
Ω(err).Should(HaveOccurred())
|
||||
})
|
||||
})
|
||||
|
||||
Context("if pointed at a .test that isn't executable", func() {
|
||||
It("should error", func() {
|
||||
suite, err := PrecompiledTestSuite(filepath.Join(tmpDir, "precompiled-dir", "nonexecutable.test"))
|
||||
Ω(suite).Should(BeZero())
|
||||
Ω(err).Should(HaveOccurred())
|
||||
})
|
||||
})
|
||||
|
||||
Context("if pointed at a nonexisting file", func() {
|
||||
It("should error", func() {
|
||||
suite, err := PrecompiledTestSuite(filepath.Join(tmpDir, "precompiled-dir", "nope-nothing-to-see-here"))
|
||||
Ω(suite).Should(BeZero())
|
||||
Ω(err).Should(HaveOccurred())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("scanning for suites in a directory", func() {
|
||||
Context("when there are no tests in the specified directory", func() {
|
||||
It("should come up empty", func() {
|
||||
suites := SuitesInDir(tmpDir, false)
|
||||
Ω(suites).Should(BeEmpty())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when there are ginkgo tests in the specified directory", func() {
|
||||
It("should return an appropriately configured suite", func() {
|
||||
suites := SuitesInDir(filepath.Join(tmpDir, "colonelmustard"), false)
|
||||
Ω(suites).Should(HaveLen(1))
|
||||
|
||||
Ω(suites[0].Path).Should(Equal(relTmpDir + "/colonelmustard"))
|
||||
Ω(suites[0].PackageName).Should(Equal("colonelmustard"))
|
||||
Ω(suites[0].IsGinkgo).Should(BeTrue())
|
||||
Ω(suites[0].Precompiled).Should(BeFalse())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when there are non-ginkgo tests in the specified directory", func() {
|
||||
It("should return an appropriately configured suite", func() {
|
||||
suites := SuitesInDir(filepath.Join(tmpDir, "professorplum"), false)
|
||||
Ω(suites).Should(HaveLen(1))
|
||||
|
||||
Ω(suites[0].Path).Should(Equal(relTmpDir + "/professorplum"))
|
||||
Ω(suites[0].PackageName).Should(Equal("professorplum"))
|
||||
Ω(suites[0].IsGinkgo).Should(BeFalse())
|
||||
Ω(suites[0].Precompiled).Should(BeFalse())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when recursively scanning", func() {
|
||||
It("should return suites for corresponding test suites, only", func() {
|
||||
suites := SuitesInDir(tmpDir, true)
|
||||
Ω(suites).Should(HaveLen(3))
|
||||
|
||||
Ω(suites).Should(ContainElement(TestSuite{
|
||||
Path: relTmpDir + "/colonelmustard",
|
||||
PackageName: "colonelmustard",
|
||||
IsGinkgo: true,
|
||||
Precompiled: false,
|
||||
}))
|
||||
Ω(suites).Should(ContainElement(TestSuite{
|
||||
Path: relTmpDir + "/professorplum",
|
||||
PackageName: "professorplum",
|
||||
IsGinkgo: false,
|
||||
Precompiled: false,
|
||||
}))
|
||||
Ω(suites).Should(ContainElement(TestSuite{
|
||||
Path: relTmpDir + "/colonelmustard/library",
|
||||
PackageName: "library",
|
||||
IsGinkgo: true,
|
||||
Precompiled: false,
|
||||
}))
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
121
Godeps/_workspace/src/github.com/onsi/ginkgo/integration/convert_test.go
generated
vendored
@@ -1,121 +0,0 @@
|
||||
package integration_test
|
||||
|
||||
import (
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("ginkgo convert", func() {
|
||||
var tmpDir string
|
||||
|
||||
readConvertedFileNamed := func(pathComponents ...string) string {
|
||||
pathToFile := filepath.Join(tmpDir, "convert_fixtures", filepath.Join(pathComponents...))
|
||||
bytes, err := ioutil.ReadFile(pathToFile)
|
||||
ExpectWithOffset(1, err).NotTo(HaveOccurred())
|
||||
|
||||
return string(bytes)
|
||||
}
|
||||
|
||||
readGoldMasterNamed := func(filename string) string {
|
||||
bytes, err := ioutil.ReadFile(filepath.Join("_fixtures", "convert_goldmasters", filename))
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
return string(bytes)
|
||||
}
|
||||
|
||||
BeforeEach(func() {
|
||||
var err error
|
||||
|
||||
tmpDir, err = ioutil.TempDir("", "ginkgo-convert")
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
err = exec.Command("cp", "-r", filepath.Join("_fixtures", "convert_fixtures"), tmpDir).Run()
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
})
|
||||
|
||||
JustBeforeEach(func() {
|
||||
cwd, err := os.Getwd()
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
relPath, err := filepath.Rel(cwd, filepath.Join(tmpDir, "convert_fixtures"))
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
cmd := exec.Command(pathToGinkgo, "convert", relPath)
|
||||
cmd.Env = os.Environ()
|
||||
for i, env := range cmd.Env {
|
||||
if strings.HasPrefix(env, "PATH") {
|
||||
cmd.Env[i] = cmd.Env[i] + ":" + filepath.Dir(pathToGinkgo)
|
||||
break
|
||||
}
|
||||
}
|
||||
err = cmd.Run()
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
})
|
||||
|
||||
AfterEach(func() {
|
||||
err := os.RemoveAll(tmpDir)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
})
|
||||
|
||||
It("rewrites xunit tests as ginkgo tests", func() {
|
||||
convertedFile := readConvertedFileNamed("xunit_test.go")
|
||||
goldMaster := readGoldMasterNamed("xunit_test.go")
|
||||
Ω(convertedFile).Should(Equal(goldMaster))
|
||||
})
|
||||
|
||||
It("rewrites all usages of *testing.T as mr.T()", func() {
|
||||
convertedFile := readConvertedFileNamed("extra_functions_test.go")
|
||||
goldMaster := readGoldMasterNamed("extra_functions_test.go")
|
||||
Ω(convertedFile).Should(Equal(goldMaster))
|
||||
})
|
||||
|
||||
It("rewrites tests in the package dir that belong to other packages", func() {
|
||||
convertedFile := readConvertedFileNamed("outside_package_test.go")
|
||||
goldMaster := readGoldMasterNamed("outside_package_test.go")
|
||||
Ω(convertedFile).Should(Equal(goldMaster))
|
||||
})
|
||||
|
||||
It("rewrites tests in nested packages", func() {
|
||||
convertedFile := readConvertedFileNamed("nested", "nested_test.go")
|
||||
goldMaster := readGoldMasterNamed("nested_test.go")
|
||||
Ω(convertedFile).Should(Equal(goldMaster))
|
||||
})
|
||||
|
||||
Context("ginkgo test suite files", func() {
|
||||
It("creates a ginkgo test suite file for the package you specified", func() {
|
||||
testsuite := readConvertedFileNamed("convert_fixtures_suite_test.go")
|
||||
goldMaster := readGoldMasterNamed("suite_test.go")
|
||||
Ω(testsuite).Should(Equal(goldMaster))
|
||||
})
|
||||
|
||||
It("converts go tests in deeply nested packages (some may not contain go files)", func() {
|
||||
testsuite := readConvertedFileNamed("nested_without_gofiles", "subpackage", "nested_subpackage_test.go")
|
||||
goldMaster := readGoldMasterNamed("nested_subpackage_test.go")
|
||||
Ω(testsuite).Should(Equal(goldMaster))
|
||||
})
|
||||
|
||||
It("creates ginkgo test suites for all nested packages", func() {
|
||||
testsuite := readConvertedFileNamed("nested", "nested_suite_test.go")
|
||||
goldMaster := readGoldMasterNamed("nested_suite_test.go")
|
||||
Ω(testsuite).Should(Equal(goldMaster))
|
||||
})
|
||||
})
|
||||
|
||||
Context("with an existing test suite file", func() {
|
||||
BeforeEach(func() {
|
||||
goldMaster := readGoldMasterNamed("fixtures_suite_test.go")
|
||||
err := ioutil.WriteFile(filepath.Join(tmpDir, "convert_fixtures", "tmp_suite_test.go"), []byte(goldMaster), 0600)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
})
|
||||
|
||||
It("gracefully handles existing test suite files", func() {
|
||||
//nothing should have gone wrong!
|
||||
})
|
||||
})
|
||||
})
|
||||
34
Godeps/_workspace/src/github.com/onsi/ginkgo/integration/coverage_test.go
generated
vendored
@@ -1,34 +0,0 @@
|
||||
package integration_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
"github.com/onsi/gomega/gexec"
|
||||
"os"
|
||||
"os/exec"
|
||||
)
|
||||
|
||||
var _ = Describe("Coverage Specs", func() {
|
||||
AfterEach(func() {
|
||||
os.RemoveAll("./_fixtures/coverage_fixture/coverage_fixture.coverprofile")
|
||||
})
|
||||
|
||||
It("runs coverage analysis in series and in parallel", func() {
|
||||
session := startGinkgo("./_fixtures/coverage_fixture", "-cover")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := session.Out.Contents()
|
||||
Ω(output).Should(ContainSubstring("coverage: 80.0% of statements"))
|
||||
|
||||
serialCoverProfileOutput, err := exec.Command("go", "tool", "cover", "-func=./_fixtures/coverage_fixture/coverage_fixture.coverprofile").CombinedOutput()
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
os.RemoveAll("./_fixtures/coverage_fixture/coverage_fixture.coverprofile")
|
||||
|
||||
Eventually(startGinkgo("./_fixtures/coverage_fixture", "-cover", "-nodes=4")).Should(gexec.Exit(0))
|
||||
|
||||
parallelCoverProfileOutput, err := exec.Command("go", "tool", "cover", "-func=./_fixtures/coverage_fixture/coverage_fixture.coverprofile").CombinedOutput()
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
Ω(parallelCoverProfileOutput).Should(Equal(serialCoverProfileOutput))
|
||||
})
|
||||
})
|
||||
48 Godeps/_workspace/src/github.com/onsi/ginkgo/integration/fail_test.go generated vendored
@@ -1,48 +0,0 @@
package integration_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
	"github.com/onsi/gomega/gexec"
)

var _ = Describe("Failing Specs", func() {
	var pathToTest string

	BeforeEach(func() {
		pathToTest = tmpPath("failing")
		copyIn("fail_fixture", pathToTest)
	})

	It("should fail in all the possible ways", func() {
		session := startGinkgo(pathToTest, "--noColor")
		Eventually(session).Should(gexec.Exit(1))
		output := string(session.Out.Contents())

		Ω(output).ShouldNot(ContainSubstring("NEVER SEE THIS"))

		Ω(output).Should(ContainSubstring("a top level failure on line 9"))
		Ω(output).Should(ContainSubstring("fail_fixture_test.go:9"))
		Ω(output).Should(ContainSubstring("an async top level failure on line 14"))
		Ω(output).Should(ContainSubstring("fail_fixture_test.go:14"))
		Ω(output).Should(ContainSubstring("a top level goroutine failure on line 21"))
		Ω(output).Should(ContainSubstring("fail_fixture_test.go:21"))

		Ω(output).Should(ContainSubstring("a sync failure"))
		Ω(output).Should(MatchRegexp(`Test Panicked\n\s+a sync panic`))
		Ω(output).Should(ContainSubstring("a sync FAIL failure"))
		Ω(output).Should(ContainSubstring("async timeout [It]"))
		Ω(output).Should(ContainSubstring("Timed out"))
		Ω(output).Should(ContainSubstring("an async failure"))
		Ω(output).Should(MatchRegexp(`Test Panicked\n\s+an async panic`))
		Ω(output).Should(ContainSubstring("an async FAIL failure"))
		Ω(output).Should(ContainSubstring("a goroutine FAIL failure"))
		Ω(output).Should(ContainSubstring("a goroutine failure"))
		Ω(output).Should(MatchRegexp(`Test Panicked\n\s+a goroutine panic`))
		Ω(output).Should(ContainSubstring("a measure failure"))
		Ω(output).Should(ContainSubstring("a measure FAIL failure"))
		Ω(output).Should(MatchRegexp(`Test Panicked\n\s+a measure panic`))

		Ω(output).Should(ContainSubstring("0 Passed | 16 Failed"))
	})
})
176 Godeps/_workspace/src/github.com/onsi/ginkgo/integration/flags_test.go generated vendored
@@ -1,176 +0,0 @@
package integration_test

import (
	"os"
	"path/filepath"
	"strings"

	. "github.com/onsi/ginkgo"
	"github.com/onsi/ginkgo/types"
	. "github.com/onsi/gomega"
	"github.com/onsi/gomega/gexec"
)

var _ = Describe("Flags Specs", func() {
	var pathToTest string

	BeforeEach(func() {
		pathToTest = tmpPath("flags")
		copyIn("flags_tests", pathToTest)
	})

	getRandomOrders := func(output string) []int {
		return []int{strings.Index(output, "RANDOM_A"), strings.Index(output, "RANDOM_B"), strings.Index(output, "RANDOM_C")}
	}

	It("normally passes, runs measurements, prints out noisy pendings, does not randomize tests, and honors the programmatic focus", func() {
		session := startGinkgo(pathToTest, "--noColor")
		Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE))
		output := string(session.Out.Contents())

		Ω(output).Should(ContainSubstring("Ran 3 samples:"), "has a measurement")
		Ω(output).Should(ContainSubstring("10 Passed"))
		Ω(output).Should(ContainSubstring("0 Failed"))
		Ω(output).Should(ContainSubstring("1 Pending"))
		Ω(output).Should(ContainSubstring("2 Skipped"))
		Ω(output).Should(ContainSubstring("[PENDING]"))
		Ω(output).Should(ContainSubstring("marshmallow"))
		Ω(output).Should(ContainSubstring("chocolate"))
		Ω(output).Should(ContainSubstring("CUSTOM_FLAG: default"))
		Ω(output).Should(ContainSubstring("Detected Programmatic Focus - setting exit status to %d", types.GINKGO_FOCUS_EXIT_CODE))
		Ω(output).ShouldNot(ContainSubstring("smores"))
		Ω(output).ShouldNot(ContainSubstring("SLOW TEST"))
		Ω(output).ShouldNot(ContainSubstring("should honor -slowSpecThreshold"))

		orders := getRandomOrders(output)
		Ω(orders[0]).Should(BeNumerically("<", orders[1]))
		Ω(orders[1]).Should(BeNumerically("<", orders[2]))
	})

	It("should run a coverprofile when passed -cover", func() {
		session := startGinkgo(pathToTest, "--noColor", "--cover", "--focus=the focused set")
		Eventually(session).Should(gexec.Exit(0))
		output := string(session.Out.Contents())

		_, err := os.Stat(filepath.Join(pathToTest, "flags.coverprofile"))
		Ω(err).ShouldNot(HaveOccurred())
		Ω(output).Should(ContainSubstring("coverage: "))
	})

	It("should fail when there are pending tests and it is passed --failOnPending", func() {
		session := startGinkgo(pathToTest, "--noColor", "--failOnPending")
		Eventually(session).Should(gexec.Exit(1))
	})

	It("should not print out pendings when --noisyPendings=false", func() {
		session := startGinkgo(pathToTest, "--noColor", "--noisyPendings=false")
		Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE))
		output := string(session.Out.Contents())

		Ω(output).ShouldNot(ContainSubstring("[PENDING]"))
		Ω(output).Should(ContainSubstring("1 Pending"))
	})

	It("should override the programmatic focus when told to focus", func() {
		session := startGinkgo(pathToTest, "--noColor", "--focus=smores")
		Eventually(session).Should(gexec.Exit(0))
		output := string(session.Out.Contents())

		Ω(output).Should(ContainSubstring("marshmallow"))
		Ω(output).Should(ContainSubstring("chocolate"))
		Ω(output).Should(ContainSubstring("smores"))
		Ω(output).Should(ContainSubstring("3 Passed"))
		Ω(output).Should(ContainSubstring("0 Failed"))
		Ω(output).Should(ContainSubstring("0 Pending"))
		Ω(output).Should(ContainSubstring("10 Skipped"))
	})

	It("should override the programmatic focus when told to skip", func() {
		session := startGinkgo(pathToTest, "--noColor", "--skip=marshmallow|failing")
		Eventually(session).Should(gexec.Exit(0))
		output := string(session.Out.Contents())

		Ω(output).ShouldNot(ContainSubstring("marshmallow"))
		Ω(output).Should(ContainSubstring("chocolate"))
		Ω(output).Should(ContainSubstring("smores"))
		Ω(output).Should(ContainSubstring("10 Passed"))
		Ω(output).Should(ContainSubstring("0 Failed"))
		Ω(output).Should(ContainSubstring("1 Pending"))
		Ω(output).Should(ContainSubstring("2 Skipped"))
	})

	It("should run the race detector when told to", func() {
		session := startGinkgo(pathToTest, "--noColor", "--race")
		Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE))
		output := string(session.Out.Contents())

		Ω(output).Should(ContainSubstring("WARNING: DATA RACE"))
	})

	It("should randomize tests when told to", func() {
		session := startGinkgo(pathToTest, "--noColor", "--randomizeAllSpecs", "--seed=21")
		Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE))
		output := string(session.Out.Contents())

		orders := getRandomOrders(output)
		Ω(orders[0]).ShouldNot(BeNumerically("<", orders[1]))
	})

	It("should skip measurements when told to", func() {
		session := startGinkgo(pathToTest, "--skipMeasurements")
		Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE))
		output := string(session.Out.Contents())

		Ω(output).ShouldNot(ContainSubstring("Ran 3 samples:"), "has a measurement")
		Ω(output).Should(ContainSubstring("3 Skipped"))
	})

	It("should watch for slow specs", func() {
		session := startGinkgo(pathToTest, "--slowSpecThreshold=0.05")
		Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE))
		output := string(session.Out.Contents())

		Ω(output).Should(ContainSubstring("SLOW TEST"))
		Ω(output).Should(ContainSubstring("should honor -slowSpecThreshold"))
	})

	It("should pass additional arguments in", func() {
		session := startGinkgo(pathToTest, "--", "--customFlag=madagascar")
		Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE))
		output := string(session.Out.Contents())

		Ω(output).Should(ContainSubstring("CUSTOM_FLAG: madagascar"))
	})

	It("should print out full stack traces for failures when told to", func() {
		session := startGinkgo(pathToTest, "--focus=a failing test", "--trace")
		Eventually(session).Should(gexec.Exit(1))
		output := string(session.Out.Contents())

		Ω(output).Should(ContainSubstring("Full Stack Trace"))
	})

	It("should fail fast when told to", func() {
		pathToTest = tmpPath("fail")
		copyIn("fail_fixture", pathToTest)
		session := startGinkgo(pathToTest, "--failFast")
		Eventually(session).Should(gexec.Exit(1))
		output := string(session.Out.Contents())

		Ω(output).Should(ContainSubstring("1 Failed"))
		Ω(output).Should(ContainSubstring("15 Skipped"))
	})

	It("should perform a dry run when told to", func() {
		pathToTest = tmpPath("fail")
		copyIn("fail_fixture", pathToTest)
		session := startGinkgo(pathToTest, "--dryRun", "-v")
		Eventually(session).Should(gexec.Exit(0))
		output := string(session.Out.Contents())

		Ω(output).Should(ContainSubstring("synchronous failures"))
		Ω(output).Should(ContainSubstring("16 Specs"))
		Ω(output).Should(ContainSubstring("0 Passed"))
		Ω(output).Should(ContainSubstring("0 Failed"))
	})
})
89 Godeps/_workspace/src/github.com/onsi/ginkgo/integration/integration_suite_test.go generated vendored
@@ -1,89 +0,0 @@
package integration_test

import (
	"io"
	"io/ioutil"
	"os"
	"os/exec"
	"path/filepath"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
	"github.com/onsi/gomega/gexec"

	"testing"
	"time"
)

var tmpDir string
var pathToGinkgo string

func TestIntegration(t *testing.T) {
	SetDefaultEventuallyTimeout(15 * time.Second)
	RegisterFailHandler(Fail)
	RunSpecs(t, "Integration Suite")
}

var _ = SynchronizedBeforeSuite(func() []byte {
	pathToGinkgo, err := gexec.Build("github.com/onsi/ginkgo/ginkgo")
	Ω(err).ShouldNot(HaveOccurred())
	return []byte(pathToGinkgo)
}, func(computedPathToGinkgo []byte) {
	pathToGinkgo = string(computedPathToGinkgo)
})

var _ = BeforeEach(func() {
	var err error
	tmpDir, err = ioutil.TempDir("", "ginkgo-run")
	Ω(err).ShouldNot(HaveOccurred())
})

var _ = AfterEach(func() {
	err := os.RemoveAll(tmpDir)
	Ω(err).ShouldNot(HaveOccurred())
})

var _ = SynchronizedAfterSuite(func() {}, func() {
	gexec.CleanupBuildArtifacts()
})

func tmpPath(destination string) string {
	return filepath.Join(tmpDir, destination)
}

func copyIn(fixture string, destination string) {
	err := os.MkdirAll(destination, 0777)
	Ω(err).ShouldNot(HaveOccurred())

	filepath.Walk(filepath.Join("_fixtures", fixture), func(path string, info os.FileInfo, err error) error {
		if info.IsDir() {
			return nil
		}

		base := filepath.Base(path)

		src, err := os.Open(path)
		Ω(err).ShouldNot(HaveOccurred())

		dst, err := os.Create(filepath.Join(destination, base))
		Ω(err).ShouldNot(HaveOccurred())

		_, err = io.Copy(dst, src)
		Ω(err).ShouldNot(HaveOccurred())
		return nil
	})
}

func ginkgoCommand(dir string, args ...string) *exec.Cmd {
	cmd := exec.Command(pathToGinkgo, args...)
	cmd.Dir = dir

	return cmd
}

func startGinkgo(dir string, args ...string) *gexec.Session {
	cmd := ginkgoCommand(dir, args...)
	session, err := gexec.Start(cmd, GinkgoWriter, GinkgoWriter)
	Ω(err).ShouldNot(HaveOccurred())
	return session
}
51 Godeps/_workspace/src/github.com/onsi/ginkgo/integration/interrupt_test.go generated vendored
@@ -1,51 +0,0 @@
package integration_test

import (
	"os/exec"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
	"github.com/onsi/gomega/gbytes"
	"github.com/onsi/gomega/gexec"
)

var _ = Describe("Interrupt", func() {
	var pathToTest string
	BeforeEach(func() {
		pathToTest = tmpPath("hanging")
		copyIn("hanging_suite", pathToTest)
	})

	Context("when interrupting a suite", func() {
		var session *gexec.Session
		BeforeEach(func() {
			//we need to signal the actual process, so we must compile the test first
			var err error
			cmd := exec.Command("go", "test", "-c")
			cmd.Dir = pathToTest
			session, err = gexec.Start(cmd, GinkgoWriter, GinkgoWriter)
			Ω(err).ShouldNot(HaveOccurred())
			Eventually(session).Should(gexec.Exit(0))

			//then run the compiled test directly
			cmd = exec.Command("./hanging.test", "--test.v=true", "--ginkgo.noColor")
			cmd.Dir = pathToTest
			session, err = gexec.Start(cmd, GinkgoWriter, GinkgoWriter)
			Ω(err).ShouldNot(HaveOccurred())

			Eventually(session).Should(gbytes.Say("Sleeping..."))
			session.Interrupt()
			Eventually(session, 1000).Should(gexec.Exit(1))
		})

		It("should emit the contents of the GinkgoWriter", func() {
			Ω(session).Should(gbytes.Say("Just beginning"))
			Ω(session).Should(gbytes.Say("Almost there..."))
			Ω(session).Should(gbytes.Say("Hanging Out"))
		})

		It("should run the AfterSuite", func() {
			Ω(session).Should(gbytes.Say("Heading Out After Suite"))
		})
	})
})
53 Godeps/_workspace/src/github.com/onsi/ginkgo/integration/precompiled_test.go generated vendored
@@ -1,53 +0,0 @@
package integration_test

import (
	"os"
	"os/exec"
	"path/filepath"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
	"github.com/onsi/gomega/gbytes"
	"github.com/onsi/gomega/gexec"
)

var _ = Describe("ginkgo build", func() {
	var pathToTest string

	BeforeEach(func() {
		pathToTest = tmpPath("passing_ginkgo_tests")
		copyIn("passing_ginkgo_tests", pathToTest)
		session := startGinkgo(pathToTest, "build")
		Eventually(session).Should(gexec.Exit(0))
		output := string(session.Out.Contents())
		Ω(output).Should(ContainSubstring("Compiling passing_ginkgo_tests"))
		Ω(output).Should(ContainSubstring("compiled passing_ginkgo_tests.test"))
	})

	It("should build a test binary", func() {
		_, err := os.Stat(filepath.Join(pathToTest, "passing_ginkgo_tests.test"))
		Ω(err).ShouldNot(HaveOccurred())
	})

	It("should be possible to run the test binary directly", func() {
		cmd := exec.Command("./passing_ginkgo_tests.test")
		cmd.Dir = pathToTest
		session, err := gexec.Start(cmd, GinkgoWriter, GinkgoWriter)
		Ω(err).ShouldNot(HaveOccurred())
		Eventually(session).Should(gexec.Exit(0))
		Ω(session).Should(gbytes.Say("Running Suite: Passing_ginkgo_tests Suite"))
	})

	It("should be possible to run the test binary via ginkgo", func() {
		session := startGinkgo(pathToTest, "./passing_ginkgo_tests.test")
		Eventually(session).Should(gexec.Exit(0))
		Ω(session).Should(gbytes.Say("Running Suite: Passing_ginkgo_tests Suite"))
	})

	It("should be possible to run the test binary in parallel", func() {
		session := startGinkgo(pathToTest, "--nodes=4", "--noColor", "./passing_ginkgo_tests.test")
		Eventually(session).Should(gexec.Exit(0))
		Ω(session).Should(gbytes.Say("Running Suite: Passing_ginkgo_tests Suite"))
		Ω(session).Should(gbytes.Say("Running in parallel across 4 nodes"))
	})
})
75 Godeps/_workspace/src/github.com/onsi/ginkgo/integration/progress_test.go generated vendored
@@ -1,75 +0,0 @@
package integration_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
	"github.com/onsi/gomega/gbytes"
	"github.com/onsi/gomega/gexec"
)

var _ = Describe("Emitting progress", func() {
	var pathToTest string
	var session *gexec.Session
	var args []string

	BeforeEach(func() {
		args = []string{"--noColor"}
		pathToTest = tmpPath("progress")
		copyIn("progress_fixture", pathToTest)
	})

	JustBeforeEach(func() {
		session = startGinkgo(pathToTest, args...)
		Eventually(session).Should(gexec.Exit(0))
	})

	Context("with the -progress flag, but no -v flag", func() {
		BeforeEach(func() {
			args = append(args, "-progress")
		})

		It("should not emit progress", func() {
			Ω(session).ShouldNot(gbytes.Say("[bB]efore"))
		})
	})

	Context("with the -v flag", func() {
		BeforeEach(func() {
			args = append(args, "-v")
		})

		It("should not emit progress", func() {
			Ω(session).ShouldNot(gbytes.Say(`\[BeforeEach\]`))
			Ω(session).Should(gbytes.Say(`>outer before<`))
		})
	})

	Context("with the -progress flag and the -v flag", func() {
		BeforeEach(func() {
			args = append(args, "-progress", "-v")
		})

		It("should emit progress (by writing to the GinkgoWriter)", func() {
			Ω(session).Should(gbytes.Say(`\[BeforeEach\] ProgressFixture`))
			Ω(session).Should(gbytes.Say(`>outer before<`))

			Ω(session).Should(gbytes.Say(`\[BeforeEach\] Inner Context`))
			Ω(session).Should(gbytes.Say(`>inner before<`))

			Ω(session).Should(gbytes.Say(`\[JustBeforeEach\] ProgressFixture`))
			Ω(session).Should(gbytes.Say(`>outer just before<`))

			Ω(session).Should(gbytes.Say(`\[JustBeforeEach\] Inner Context`))
			Ω(session).Should(gbytes.Say(`>inner just before<`))

			Ω(session).Should(gbytes.Say(`\[It\] should emit progress as it goes`))
			Ω(session).Should(gbytes.Say(`>it<`))

			Ω(session).Should(gbytes.Say(`\[AfterEach\] Inner Context`))
			Ω(session).Should(gbytes.Say(`>inner after<`))

			Ω(session).Should(gbytes.Say(`\[AfterEach\] ProgressFixture`))
			Ω(session).Should(gbytes.Say(`>outer after<`))
		})
	})
})
373 Godeps/_workspace/src/github.com/onsi/ginkgo/integration/run_test.go generated vendored
@@ -1,373 +0,0 @@
|
||||
package integration_test
|
||||
|
||||
import (
|
||||
"runtime"
|
||||
"strings"
|
||||
|
||||
. "github.com/onsi/ginkgo"
|
||||
"github.com/onsi/ginkgo/types"
|
||||
. "github.com/onsi/gomega"
|
||||
"github.com/onsi/gomega/gbytes"
|
||||
"github.com/onsi/gomega/gexec"
|
||||
)
|
||||
|
||||
var _ = Describe("Running Specs", func() {
|
||||
var pathToTest string
|
||||
|
||||
Context("when pointed at the current directory", func() {
|
||||
BeforeEach(func() {
|
||||
pathToTest = tmpPath("ginkgo")
|
||||
copyIn("passing_ginkgo_tests", pathToTest)
|
||||
})
|
||||
|
||||
It("should run the tests in the working directory", func() {
|
||||
session := startGinkgo(pathToTest, "--noColor")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := string(session.Out.Contents())
|
||||
|
||||
Ω(output).Should(ContainSubstring("Running Suite: Passing_ginkgo_tests Suite"))
|
||||
Ω(output).Should(ContainSubstring("••••"))
|
||||
Ω(output).Should(ContainSubstring("SUCCESS! -- 4 Passed"))
|
||||
Ω(output).Should(ContainSubstring("Test Suite Passed"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when passed an explicit package to run", func() {
|
||||
BeforeEach(func() {
|
||||
pathToTest = tmpPath("ginkgo")
|
||||
copyIn("passing_ginkgo_tests", pathToTest)
|
||||
})
|
||||
|
||||
It("should run the ginkgo style tests", func() {
|
||||
session := startGinkgo(tmpDir, "--noColor", pathToTest)
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := string(session.Out.Contents())
|
||||
|
||||
Ω(output).Should(ContainSubstring("Running Suite: Passing_ginkgo_tests Suite"))
|
||||
Ω(output).Should(ContainSubstring("••••"))
|
||||
Ω(output).Should(ContainSubstring("SUCCESS! -- 4 Passed"))
|
||||
Ω(output).Should(ContainSubstring("Test Suite Passed"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when passed a number of packages to run", func() {
|
||||
BeforeEach(func() {
|
||||
pathToTest = tmpPath("ginkgo")
|
||||
otherPathToTest := tmpPath("other")
|
||||
copyIn("passing_ginkgo_tests", pathToTest)
|
||||
copyIn("more_ginkgo_tests", otherPathToTest)
|
||||
})
|
||||
|
||||
It("should run the ginkgo style tests", func() {
|
||||
session := startGinkgo(tmpDir, "--noColor", "--succinct=false", "ginkgo", "./other")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := string(session.Out.Contents())
|
||||
|
||||
Ω(output).Should(ContainSubstring("Running Suite: Passing_ginkgo_tests Suite"))
|
||||
Ω(output).Should(ContainSubstring("Running Suite: More_ginkgo_tests Suite"))
|
||||
Ω(output).Should(ContainSubstring("Test Suite Passed"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when passed a number of packages to run, some of which have focused tests", func() {
|
||||
BeforeEach(func() {
|
||||
pathToTest = tmpPath("ginkgo")
|
||||
otherPathToTest := tmpPath("other")
|
||||
focusedPathToTest := tmpPath("focused")
|
||||
copyIn("passing_ginkgo_tests", pathToTest)
|
||||
copyIn("more_ginkgo_tests", otherPathToTest)
|
||||
copyIn("focused_fixture", focusedPathToTest)
|
||||
})
|
||||
|
||||
It("should exit with a status code of 2 and explain why", func() {
|
||||
session := startGinkgo(tmpDir, "--noColor", "--succinct=false", "-r")
|
||||
Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE))
|
||||
output := string(session.Out.Contents())
|
||||
|
||||
Ω(output).Should(ContainSubstring("Running Suite: Passing_ginkgo_tests Suite"))
|
||||
Ω(output).Should(ContainSubstring("Running Suite: More_ginkgo_tests Suite"))
|
||||
Ω(output).Should(ContainSubstring("Test Suite Passed"))
|
||||
Ω(output).Should(ContainSubstring("Detected Programmatic Focus - setting exit status to %d", types.GINKGO_FOCUS_EXIT_CODE))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when told to skipPackages", func() {
|
||||
BeforeEach(func() {
|
||||
pathToTest = tmpPath("ginkgo")
|
||||
otherPathToTest := tmpPath("other")
|
||||
focusedPathToTest := tmpPath("focused")
|
||||
copyIn("passing_ginkgo_tests", pathToTest)
|
||||
copyIn("more_ginkgo_tests", otherPathToTest)
|
||||
copyIn("focused_fixture", focusedPathToTest)
|
||||
})
|
||||
|
||||
It("should skip packages that match the list", func() {
|
||||
session := startGinkgo(tmpDir, "--noColor", "--skipPackage=other,focused", "-r")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := string(session.Out.Contents())
|
||||
|
||||
Ω(output).Should(ContainSubstring("Passing_ginkgo_tests Suite"))
|
||||
Ω(output).ShouldNot(ContainSubstring("More_ginkgo_tests Suite"))
|
||||
Ω(output).ShouldNot(ContainSubstring("Focused_fixture Suite"))
|
||||
Ω(output).Should(ContainSubstring("Test Suite Passed"))
|
||||
})
|
||||
|
||||
Context("when all packages are skipped", func() {
|
||||
It("should not run anything, but still exit 0", func() {
|
||||
session := startGinkgo(tmpDir, "--noColor", "--skipPackage=other,focused,ginkgo", "-r")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := string(session.Out.Contents())
|
||||
|
||||
Ω(output).Should(ContainSubstring("All tests skipped!"))
|
||||
Ω(output).ShouldNot(ContainSubstring("Passing_ginkgo_tests Suite"))
|
||||
Ω(output).ShouldNot(ContainSubstring("More_ginkgo_tests Suite"))
|
||||
Ω(output).ShouldNot(ContainSubstring("Focused_fixture Suite"))
|
||||
Ω(output).ShouldNot(ContainSubstring("Test Suite Passed"))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Context("when there are no tests to run", func() {
|
||||
It("should exit 1", func() {
|
||||
session := startGinkgo(tmpDir, "--noColor", "--skipPackage=other,focused", "-r")
|
||||
Eventually(session).Should(gexec.Exit(1))
|
||||
output := string(session.Err.Contents())
|
||||
|
||||
Ω(output).Should(ContainSubstring("Found no test suites"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when told to randomizeSuites", func() {
|
||||
BeforeEach(func() {
|
||||
pathToTest = tmpPath("ginkgo")
|
||||
otherPathToTest := tmpPath("other")
|
||||
copyIn("passing_ginkgo_tests", pathToTest)
|
||||
copyIn("more_ginkgo_tests", otherPathToTest)
|
||||
})
|
||||
|
||||
It("should skip packages that match the regexp", func() {
|
||||
session := startGinkgo(tmpDir, "--noColor", "--randomizeSuites", "-r", "--seed=2")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
|
||||
Ω(session).Should(gbytes.Say("More_ginkgo_tests Suite"))
|
||||
Ω(session).Should(gbytes.Say("Passing_ginkgo_tests Suite"))
|
||||
|
||||
session = startGinkgo(tmpDir, "--noColor", "--randomizeSuites", "-r", "--seed=3")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
|
||||
Ω(session).Should(gbytes.Say("Passing_ginkgo_tests Suite"))
|
||||
Ω(session).Should(gbytes.Say("More_ginkgo_tests Suite"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when pointed at a package with xunit style tests", func() {
|
||||
BeforeEach(func() {
|
||||
pathToTest = tmpPath("xunit")
|
||||
copyIn("xunit_tests", pathToTest)
|
||||
})
|
||||
|
||||
It("should run the xunit style tests", func() {
|
||||
session := startGinkgo(pathToTest)
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := string(session.Out.Contents())
|
||||
|
||||
Ω(output).Should(ContainSubstring("--- PASS: TestAlwaysTrue"))
|
||||
Ω(output).Should(ContainSubstring("Test Suite Passed"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when pointed at a package with no tests", func() {
|
||||
BeforeEach(func() {
|
||||
pathToTest = tmpPath("no_tests")
|
||||
copyIn("no_tests", pathToTest)
|
||||
})
|
||||
|
||||
It("should fail", func() {
|
||||
session := startGinkgo(pathToTest, "--noColor")
|
||||
Eventually(session).Should(gexec.Exit(1))
|
||||
|
||||
Ω(session.Err.Contents()).Should(ContainSubstring("Found no test suites"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when pointed at a package that fails to compile", func() {
|
||||
BeforeEach(func() {
|
||||
pathToTest = tmpPath("does_not_compile")
|
||||
copyIn("does_not_compile", pathToTest)
|
||||
})
|
||||
|
||||
It("should fail", func() {
|
||||
session := startGinkgo(pathToTest, "--noColor")
|
||||
Eventually(session).Should(gexec.Exit(1))
|
||||
output := string(session.Out.Contents())
|
||||
|
||||
Ω(output).Should(ContainSubstring("Failed to compile"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when running in parallel", func() {
|
||||
BeforeEach(func() {
|
||||
pathToTest = tmpPath("ginkgo")
|
||||
copyIn("passing_ginkgo_tests", pathToTest)
|
||||
})
|
||||
|
||||
Context("with a specific number of -nodes", func() {
|
||||
It("should use the specified number of nodes", func() {
|
||||
session := startGinkgo(pathToTest, "--noColor", "-succinct", "-nodes=2")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := string(session.Out.Contents())
|
||||
|
||||
Ω(output).Should(MatchRegexp(`\[\d+\] Passing_ginkgo_tests Suite - 4/4 specs - 2 nodes •••• SUCCESS! [\d.µs]+`))
|
||||
Ω(output).Should(ContainSubstring("Test Suite Passed"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("with -p", func() {
|
||||
It("it should autocompute the number of nodes", func() {
|
||||
session := startGinkgo(pathToTest, "--noColor", "-succinct", "-p")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := string(session.Out.Contents())
|
||||
|
||||
nodes := runtime.NumCPU()
|
||||
if nodes > 4 {
|
||||
nodes = nodes - 1
|
||||
}
|
||||
Ω(output).Should(MatchRegexp(`\[\d+\] Passing_ginkgo_tests Suite - 4/4 specs - %d nodes •••• SUCCESS! [\d.µs]+`, nodes))
|
||||
Ω(output).Should(ContainSubstring("Test Suite Passed"))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Context("when streaming in parallel", func() {
|
||||
BeforeEach(func() {
|
||||
pathToTest = tmpPath("ginkgo")
|
||||
copyIn("passing_ginkgo_tests", pathToTest)
|
||||
})
|
||||
|
||||
It("should print output in realtime", func() {
|
||||
session := startGinkgo(pathToTest, "--noColor", "-stream", "-nodes=2")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := string(session.Out.Contents())
|
||||
|
||||
Ω(output).Should(ContainSubstring(`[1] Parallel test node 1/2.`))
|
||||
Ω(output).Should(ContainSubstring(`[2] Parallel test node 2/2.`))
|
||||
Ω(output).Should(ContainSubstring(`[1] SUCCESS!`))
|
||||
Ω(output).Should(ContainSubstring(`[2] SUCCESS!`))
|
||||
Ω(output).Should(ContainSubstring("Test Suite Passed"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when running recursively", func() {
|
||||
BeforeEach(func() {
|
||||
passingTest := tmpPath("A")
|
||||
otherPassingTest := tmpPath("E")
|
||||
copyIn("passing_ginkgo_tests", passingTest)
|
||||
copyIn("more_ginkgo_tests", otherPassingTest)
|
||||
})
|
||||
|
||||
Context("when all the tests pass", func() {
|
||||
It("should run all the tests (in succinct mode) and succeed", func() {
|
||||
session := startGinkgo(tmpDir, "--noColor", "-r")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := string(session.Out.Contents())
|
||||
|
||||
outputLines := strings.Split(output, "\n")
|
||||
Ω(outputLines[0]).Should(MatchRegexp(`\[\d+\] Passing_ginkgo_tests Suite - 4/4 specs •••• SUCCESS! [\d.µs]+ PASS`))
|
||||
Ω(outputLines[1]).Should(MatchRegexp(`\[\d+\] More_ginkgo_tests Suite - 2/2 specs •• SUCCESS! [\d.µs]+ PASS`))
|
||||
Ω(output).Should(ContainSubstring("Test Suite Passed"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when one of the packages has a failing tests", func() {
|
||||
BeforeEach(func() {
|
||||
failingTest := tmpPath("C")
|
||||
copyIn("failing_ginkgo_tests", failingTest)
|
||||
})
|
||||
|
||||
It("should fail and stop running tests", func() {
|
||||
session := startGinkgo(tmpDir, "--noColor", "-r")
|
||||
Eventually(session).Should(gexec.Exit(1))
|
||||
output := string(session.Out.Contents())
|
||||
|
||||
outputLines := strings.Split(output, "\n")
|
||||
Ω(outputLines[0]).Should(MatchRegexp(`\[\d+\] Passing_ginkgo_tests Suite - 4/4 specs •••• SUCCESS! [\d.µs]+ PASS`))
|
||||
Ω(outputLines[1]).Should(MatchRegexp(`\[\d+\] Failing_ginkgo_tests Suite - 2/2 specs`))
|
||||
Ω(output).Should(ContainSubstring("• Failure"))
|
||||
Ω(output).ShouldNot(ContainSubstring("More_ginkgo_tests Suite"))
|
||||
Ω(output).Should(ContainSubstring("Test Suite Failed"))
|
||||
|
||||
Ω(output).Should(ContainSubstring("Summarizing 1 Failure:"))
|
||||
Ω(output).Should(ContainSubstring("[Fail] FailingGinkgoTests [It] should fail"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when one of the packages fails to compile", func() {
|
||||
BeforeEach(func() {
|
||||
doesNotCompileTest := tmpPath("C")
|
||||
copyIn("does_not_compile", doesNotCompileTest)
|
||||
})
|
||||
|
||||
It("should fail and stop running tests", func() {
|
||||
session := startGinkgo(tmpDir, "--noColor", "-r")
|
||||
Eventually(session).Should(gexec.Exit(1))
|
||||
output := string(session.Out.Contents())
|
||||
|
||||
outputLines := strings.Split(output, "\n")
|
||||
Ω(outputLines[0]).Should(MatchRegexp(`\[\d+\] Passing_ginkgo_tests Suite - 4/4 specs •••• SUCCESS! [\d.µs]+ PASS`))
|
||||
Ω(outputLines[1]).Should(ContainSubstring("Failed to compile C:"))
|
||||
Ω(output).ShouldNot(ContainSubstring("More_ginkgo_tests Suite"))
|
||||
Ω(output).Should(ContainSubstring("Test Suite Failed"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when either is the case, but the keepGoing flag is set", func() {
|
||||
BeforeEach(func() {
|
||||
doesNotCompileTest := tmpPath("B")
|
||||
copyIn("does_not_compile", doesNotCompileTest)
|
||||
|
||||
failingTest := tmpPath("C")
|
||||
copyIn("failing_ginkgo_tests", failingTest)
|
||||
})
|
||||
|
||||
It("should soldier on", func() {
|
||||
session := startGinkgo(tmpDir, "--noColor", "-r", "-keepGoing")
|
||||
Eventually(session).Should(gexec.Exit(1))
|
||||
output := string(session.Out.Contents())
|
||||
|
||||
outputLines := strings.Split(output, "\n")
|
||||
Ω(outputLines[0]).Should(MatchRegexp(`\[\d+\] Passing_ginkgo_tests Suite - 4/4 specs •••• SUCCESS! [\d.µs]+ PASS`))
|
||||
Ω(outputLines[1]).Should(ContainSubstring("Failed to compile B:"))
|
||||
Ω(output).Should(MatchRegexp(`\[\d+\] Failing_ginkgo_tests Suite - 2/2 specs`))
|
||||
Ω(output).Should(ContainSubstring("• Failure"))
|
||||
Ω(output).Should(MatchRegexp(`\[\d+\] More_ginkgo_tests Suite - 2/2 specs •• SUCCESS! [\d.µs]+ PASS`))
|
||||
Ω(output).Should(ContainSubstring("Test Suite Failed"))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Context("when told to keep going --untilItFails", func() {
|
||||
BeforeEach(func() {
|
||||
copyIn("eventually_failing", tmpDir)
|
||||
})
|
||||
|
||||
It("should keep rerunning the tests, until a failure occurs", func() {
|
||||
session := startGinkgo(tmpDir, "--untilItFails", "--noColor")
|
||||
Eventually(session).Should(gexec.Exit(1))
|
||||
Ω(session).Should(gbytes.Say("This was attempt #1"))
|
||||
Ω(session).Should(gbytes.Say("This was attempt #2"))
|
||||
Ω(session).Should(gbytes.Say("Tests failed on attempt #3"))
|
||||
|
||||
//it should change the random seed between each test
|
||||
lines := strings.Split(string(session.Out.Contents()), "\n")
|
||||
randomSeeds := []string{}
|
||||
for _, line := range lines {
|
||||
if strings.Contains(line, "Random Seed:") {
|
||||
randomSeeds = append(randomSeeds, strings.Split(line, ": ")[1])
|
||||
}
|
||||
}
|
||||
Ω(randomSeeds[0]).ShouldNot(Equal(randomSeeds[1]))
|
||||
Ω(randomSeeds[1]).ShouldNot(Equal(randomSeeds[2]))
|
||||
Ω(randomSeeds[0]).ShouldNot(Equal(randomSeeds[2]))
|
||||
})
|
||||
})
|
||||
})
|
||||
364 Godeps/_workspace/src/github.com/onsi/ginkgo/integration/subcommand_test.go generated vendored
@@ -1,364 +0,0 @@
|
||||
package integration_test
|
||||
|
||||
import (
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
. "github.com/onsi/ginkgo"
|
||||
"github.com/onsi/ginkgo/types"
|
||||
. "github.com/onsi/gomega"
|
||||
"github.com/onsi/gomega/gexec"
|
||||
)
|
||||
|
||||
var _ = Describe("Subcommand", func() {
|
||||
Describe("ginkgo bootstrap", func() {
|
||||
var pkgPath string
|
||||
BeforeEach(func() {
|
||||
pkgPath = tmpPath("foo")
|
||||
os.Mkdir(pkgPath, 0777)
|
||||
})
|
||||
|
||||
It("should generate a bootstrap file, as long as one does not exist", func() {
|
||||
session := startGinkgo(pkgPath, "bootstrap")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := session.Out.Contents()
|
||||
|
||||
Ω(output).Should(ContainSubstring("foo_suite_test.go"))
|
||||
|
||||
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "foo_suite_test.go"))
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(content).Should(ContainSubstring("package foo_test"))
|
||||
Ω(content).Should(ContainSubstring("func TestFoo(t *testing.T) {"))
|
||||
Ω(content).Should(ContainSubstring("RegisterFailHandler"))
|
||||
Ω(content).Should(ContainSubstring("RunSpecs"))
|
||||
|
||||
Ω(content).Should(ContainSubstring("\t" + `. "github.com/onsi/ginkgo"`))
|
||||
Ω(content).Should(ContainSubstring("\t" + `. "github.com/onsi/gomega"`))
|
||||
|
||||
session = startGinkgo(pkgPath, "bootstrap")
|
||||
Eventually(session).Should(gexec.Exit(1))
|
||||
output = session.Out.Contents()
|
||||
Ω(output).Should(ContainSubstring("foo_suite_test.go already exists"))
|
||||
})
|
||||
|
||||
It("should import nodot declarations when told to", func() {
|
||||
session := startGinkgo(pkgPath, "bootstrap", "--nodot")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := session.Out.Contents()
|
||||
|
||||
Ω(output).Should(ContainSubstring("foo_suite_test.go"))
|
||||
|
||||
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "foo_suite_test.go"))
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(content).Should(ContainSubstring("package foo_test"))
|
||||
Ω(content).Should(ContainSubstring("func TestFoo(t *testing.T) {"))
|
||||
Ω(content).Should(ContainSubstring("RegisterFailHandler"))
|
||||
Ω(content).Should(ContainSubstring("RunSpecs"))
|
||||
|
||||
Ω(content).Should(ContainSubstring("var It = ginkgo.It"))
|
||||
Ω(content).Should(ContainSubstring("var Ω = gomega.Ω"))
|
||||
|
||||
Ω(content).Should(ContainSubstring("\t" + `"github.com/onsi/ginkgo"`))
|
||||
Ω(content).Should(ContainSubstring("\t" + `"github.com/onsi/gomega"`))
|
||||
})
|
||||
|
||||
It("should generate an agouti bootstrap file when told to", func() {
|
||||
session := startGinkgo(pkgPath, "bootstrap", "--agouti")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := session.Out.Contents()
|
||||
|
||||
Ω(output).Should(ContainSubstring("foo_suite_test.go"))
|
||||
|
||||
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "foo_suite_test.go"))
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(content).Should(ContainSubstring("package foo_test"))
|
||||
Ω(content).Should(ContainSubstring("func TestFoo(t *testing.T) {"))
|
||||
Ω(content).Should(ContainSubstring("RegisterFailHandler"))
|
||||
Ω(content).Should(ContainSubstring("RunSpecs"))
|
||||
|
||||
Ω(content).Should(ContainSubstring("\t" + `. "github.com/onsi/ginkgo"`))
|
||||
Ω(content).Should(ContainSubstring("\t" + `. "github.com/onsi/gomega"`))
|
||||
Ω(content).Should(ContainSubstring("\t" + `. "github.com/sclevine/agouti/core"`))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("nodot", func() {
|
||||
It("should update the declarations in the bootstrap file", func() {
|
||||
pkgPath := tmpPath("foo")
|
||||
os.Mkdir(pkgPath, 0777)
|
||||
|
||||
session := startGinkgo(pkgPath, "bootstrap", "--nodot")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
|
||||
byteContent, err := ioutil.ReadFile(filepath.Join(pkgPath, "foo_suite_test.go"))
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
content := string(byteContent)
|
||||
content = strings.Replace(content, "var It =", "var MyIt =", -1)
|
||||
content = strings.Replace(content, "var Ω = gomega.Ω\n", "", -1)
|
||||
|
||||
err = ioutil.WriteFile(filepath.Join(pkgPath, "foo_suite_test.go"), []byte(content), os.ModePerm)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
session = startGinkgo(pkgPath, "nodot")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
|
||||
byteContent, err = ioutil.ReadFile(filepath.Join(pkgPath, "foo_suite_test.go"))
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
Ω(byteContent).Should(ContainSubstring("var MyIt = ginkgo.It"))
|
||||
Ω(byteContent).ShouldNot(ContainSubstring("var It = ginkgo.It"))
|
||||
Ω(byteContent).Should(ContainSubstring("var Ω = gomega.Ω"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("ginkgo generate", func() {
|
||||
var pkgPath string
|
||||
|
||||
BeforeEach(func() {
|
||||
pkgPath = tmpPath("foo_bar")
|
||||
os.Mkdir(pkgPath, 0777)
|
||||
})
|
||||
|
||||
Context("with no arguments", func() {
|
||||
It("should generate a test file named after the package", func() {
|
||||
session := startGinkgo(pkgPath, "generate")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := session.Out.Contents()
|
||||
|
||||
Ω(output).Should(ContainSubstring("foo_bar_test.go"))
|
||||
|
||||
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "foo_bar_test.go"))
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(content).Should(ContainSubstring("package foo_bar_test"))
|
||||
Ω(content).Should(ContainSubstring(`var _ = Describe("FooBar", func() {`))
|
||||
Ω(content).Should(ContainSubstring("\t" + `. "github.com/onsi/ginkgo"`))
|
||||
Ω(content).Should(ContainSubstring("\t" + `. "github.com/onsi/gomega"`))
|
||||
|
||||
session = startGinkgo(pkgPath, "generate")
|
||||
Eventually(session).Should(gexec.Exit(1))
|
||||
output = session.Out.Contents()
|
||||
|
||||
Ω(output).Should(ContainSubstring("foo_bar_test.go already exists"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("with an argument of the form: foo", func() {
|
||||
It("should generate a test file named after the argument", func() {
|
||||
session := startGinkgo(pkgPath, "generate", "baz_buzz")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := session.Out.Contents()
|
||||
|
||||
Ω(output).Should(ContainSubstring("baz_buzz_test.go"))
|
||||
|
||||
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "baz_buzz_test.go"))
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(content).Should(ContainSubstring("package foo_bar_test"))
|
||||
Ω(content).Should(ContainSubstring(`var _ = Describe("BazBuzz", func() {`))
|
||||
})
|
||||
})
|
||||
|
||||
Context("with an argument of the form: foo.go", func() {
|
||||
It("should generate a test file named after the argument", func() {
|
||||
session := startGinkgo(pkgPath, "generate", "baz_buzz.go")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := session.Out.Contents()
|
||||
|
||||
Ω(output).Should(ContainSubstring("baz_buzz_test.go"))
|
||||
|
||||
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "baz_buzz_test.go"))
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(content).Should(ContainSubstring("package foo_bar_test"))
|
||||
Ω(content).Should(ContainSubstring(`var _ = Describe("BazBuzz", func() {`))
|
||||
|
||||
})
|
||||
})
|
||||
|
||||
Context("with an argument of the form: foo_test", func() {
|
||||
It("should generate a test file named after the argument", func() {
|
||||
session := startGinkgo(pkgPath, "generate", "baz_buzz_test")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := session.Out.Contents()
|
||||
|
||||
Ω(output).Should(ContainSubstring("baz_buzz_test.go"))
|
||||
|
||||
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "baz_buzz_test.go"))
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(content).Should(ContainSubstring("package foo_bar_test"))
|
||||
Ω(content).Should(ContainSubstring(`var _ = Describe("BazBuzz", func() {`))
|
||||
})
|
||||
})
|
||||
|
||||
Context("with an argument of the form: foo_test.go", func() {
|
||||
It("should generate a test file named after the argument", func() {
|
||||
session := startGinkgo(pkgPath, "generate", "baz_buzz_test.go")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := session.Out.Contents()
|
||||
|
||||
Ω(output).Should(ContainSubstring("baz_buzz_test.go"))
|
||||
|
||||
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "baz_buzz_test.go"))
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(content).Should(ContainSubstring("package foo_bar_test"))
|
||||
Ω(content).Should(ContainSubstring(`var _ = Describe("BazBuzz", func() {`))
|
||||
})
|
||||
})
|
||||
|
||||
Context("with multiple arguments", func() {
|
||||
It("should generate a test file named after the argument", func() {
|
||||
session := startGinkgo(pkgPath, "generate", "baz", "buzz")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := session.Out.Contents()
|
||||
|
||||
Ω(output).Should(ContainSubstring("baz_test.go"))
|
||||
Ω(output).Should(ContainSubstring("buzz_test.go"))
|
||||
|
||||
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "baz_test.go"))
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(content).Should(ContainSubstring("package foo_bar_test"))
|
||||
Ω(content).Should(ContainSubstring(`var _ = Describe("Baz", func() {`))
|
||||
|
||||
content, err = ioutil.ReadFile(filepath.Join(pkgPath, "buzz_test.go"))
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(content).Should(ContainSubstring("package foo_bar_test"))
|
||||
Ω(content).Should(ContainSubstring(`var _ = Describe("Buzz", func() {`))
|
||||
})
|
||||
})
|
||||
|
||||
Context("with nodot", func() {
|
||||
It("should not import ginkgo or gomega", func() {
|
||||
session := startGinkgo(pkgPath, "generate", "--nodot")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := session.Out.Contents()
|
||||
|
||||
Ω(output).Should(ContainSubstring("foo_bar_test.go"))
|
||||
|
||||
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "foo_bar_test.go"))
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(content).Should(ContainSubstring("package foo_bar_test"))
|
||||
Ω(content).ShouldNot(ContainSubstring("\t" + `. "github.com/onsi/ginkgo"`))
|
||||
Ω(content).ShouldNot(ContainSubstring("\t" + `. "github.com/onsi/gomega"`))
|
||||
})
|
||||
})
|
||||
|
||||
Context("with agouti", func() {
|
||||
It("should generate an agouti test file", func() {
|
||||
session := startGinkgo(pkgPath, "generate", "--agouti")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := session.Out.Contents()
|
||||
|
||||
Ω(output).Should(ContainSubstring("foo_bar_test.go"))
|
||||
|
||||
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "foo_bar_test.go"))
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(content).Should(ContainSubstring("package foo_bar_test"))
|
||||
Ω(content).Should(ContainSubstring("\t" + `. "github.com/onsi/ginkgo"`))
|
||||
Ω(content).Should(ContainSubstring("\t" + `. "github.com/onsi/gomega"`))
|
||||
Ω(content).Should(ContainSubstring("\t" + `. "github.com/sclevine/agouti/core"`))
|
||||
Ω(content).Should(ContainSubstring("\t" + `. "github.com/sclevine/agouti/matchers"`))
|
||||
Ω(content).Should(ContainSubstring("page, err = agoutiDriver.Page()"))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("ginkgo bootstrap/generate", func() {
|
||||
var pkgPath string
|
||||
BeforeEach(func() {
|
||||
pkgPath = tmpPath("some crazy-thing")
|
||||
os.Mkdir(pkgPath, 0777)
|
||||
})
|
||||
|
||||
Context("when the working directory is empty", func() {
|
||||
It("generates correctly named bootstrap and generate files with a package name derived from the directory", func() {
|
||||
session := startGinkgo(pkgPath, "bootstrap")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
|
||||
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "some_crazy_thing_suite_test.go"))
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(content).Should(ContainSubstring("package some_crazy_thing_test"))
|
||||
Ω(content).Should(ContainSubstring("SomeCrazyThing Suite"))
|
||||
|
||||
session = startGinkgo(pkgPath, "generate")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
|
||||
content, err = ioutil.ReadFile(filepath.Join(pkgPath, "some_crazy_thing_test.go"))
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(content).Should(ContainSubstring("package some_crazy_thing_test"))
|
||||
Ω(content).Should(ContainSubstring("SomeCrazyThing"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the working directory contains a file with a package name", func() {
|
||||
BeforeEach(func() {
|
||||
Ω(ioutil.WriteFile(filepath.Join(pkgPath, "foo.go"), []byte("package main\n\nfunc main() {}"), 0777)).Should(Succeed())
|
||||
})
|
||||
|
||||
It("generates correctly named bootstrap and generate files with the package name", func() {
|
||||
session := startGinkgo(pkgPath, "bootstrap")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
|
||||
content, err := ioutil.ReadFile(filepath.Join(pkgPath, "some_crazy_thing_suite_test.go"))
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(content).Should(ContainSubstring("package main_test"))
|
||||
Ω(content).Should(ContainSubstring("SomeCrazyThing Suite"))
|
||||
|
||||
session = startGinkgo(pkgPath, "generate")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
|
||||
content, err = ioutil.ReadFile(filepath.Join(pkgPath, "some_crazy_thing_test.go"))
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(content).Should(ContainSubstring("package main_test"))
|
||||
Ω(content).Should(ContainSubstring("SomeCrazyThing"))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("ginkgo blur", func() {
|
||||
It("should unfocus tests", func() {
|
||||
pathToTest := tmpPath("focused")
|
||||
copyIn("focused_fixture", pathToTest)
|
||||
|
||||
session := startGinkgo(pathToTest, "--noColor")
|
||||
Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE))
|
||||
output := session.Out.Contents()
|
||||
|
||||
Ω(output).Should(ContainSubstring("3 Passed"))
|
||||
Ω(output).Should(ContainSubstring("3 Skipped"))
|
||||
|
||||
session = startGinkgo(pathToTest, "blur")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
|
||||
session = startGinkgo(pathToTest, "--noColor")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output = session.Out.Contents()
|
||||
Ω(output).Should(ContainSubstring("6 Passed"))
|
||||
Ω(output).Should(ContainSubstring("0 Skipped"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("ginkgo version", func() {
|
||||
It("should print out the version info", func() {
|
||||
session := startGinkgo("", "version")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := session.Out.Contents()
|
||||
|
||||
Ω(output).Should(MatchRegexp(`Ginkgo Version \d+\.\d+\.\d+`))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("ginkgo help", func() {
|
||||
It("should print out usage information", func() {
|
||||
session := startGinkgo("", "help")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := string(session.Err.Contents())
|
||||
|
||||
Ω(output).Should(MatchRegexp(`Ginkgo Version \d+\.\d+\.\d+`))
|
||||
Ω(output).Should(ContainSubstring("ginkgo watch"))
|
||||
Ω(output).Should(ContainSubstring("-succinct"))
|
||||
Ω(output).Should(ContainSubstring("-nodes"))
|
||||
Ω(output).Should(ContainSubstring("ginkgo generate"))
|
||||
})
|
||||
})
|
||||
})
|
||||
177 Godeps/_workspace/src/github.com/onsi/ginkgo/integration/suite_setup_test.go generated vendored
@@ -1,177 +0,0 @@
|
||||
package integration_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
"github.com/onsi/gomega/gexec"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var _ = Describe("SuiteSetup", func() {
|
||||
var pathToTest string
|
||||
|
||||
Context("when the BeforeSuite and AfterSuite pass", func() {
|
||||
BeforeEach(func() {
|
||||
pathToTest = tmpPath("suite_setup")
|
||||
copyIn("passing_suite_setup", pathToTest)
|
||||
})
|
||||
|
||||
It("should run the BeforeSuite once, then run all the tests", func() {
|
||||
session := startGinkgo(pathToTest, "--noColor")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := string(session.Out.Contents())
|
||||
|
||||
Ω(strings.Count(output, "BEFORE SUITE")).Should(Equal(1))
|
||||
Ω(strings.Count(output, "AFTER SUITE")).Should(Equal(1))
|
||||
})
|
||||
|
||||
It("should run the BeforeSuite once per parallel node, then run all the tests", func() {
|
||||
session := startGinkgo(pathToTest, "--noColor", "--nodes=2")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := string(session.Out.Contents())
|
||||
|
||||
Ω(strings.Count(output, "BEFORE SUITE")).Should(Equal(2))
|
||||
Ω(strings.Count(output, "AFTER SUITE")).Should(Equal(2))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the BeforeSuite fails", func() {
|
||||
BeforeEach(func() {
|
||||
pathToTest = tmpPath("suite_setup")
|
||||
copyIn("failing_before_suite", pathToTest)
|
||||
})
|
||||
|
||||
It("should run the BeforeSuite once, none of the tests, but it should run the AfterSuite", func() {
|
||||
session := startGinkgo(pathToTest, "--noColor")
|
||||
Eventually(session).Should(gexec.Exit(1))
|
||||
output := string(session.Out.Contents())
|
||||
|
||||
Ω(strings.Count(output, "BEFORE SUITE")).Should(Equal(1))
|
||||
Ω(strings.Count(output, "Test Panicked")).Should(Equal(1))
|
||||
Ω(strings.Count(output, "AFTER SUITE")).Should(Equal(1))
|
||||
Ω(output).ShouldNot(ContainSubstring("NEVER SEE THIS"))
|
||||
})
|
||||
|
||||
It("should run the BeforeSuite once per parallel node, none of the tests, but it should run the AfterSuite for each node", func() {
|
||||
session := startGinkgo(pathToTest, "--noColor", "--nodes=2")
|
||||
Eventually(session).Should(gexec.Exit(1))
|
||||
output := string(session.Out.Contents())
|
||||
|
||||
Ω(strings.Count(output, "BEFORE SUITE")).Should(Equal(2))
|
||||
Ω(strings.Count(output, "Test Panicked")).Should(Equal(2))
|
||||
Ω(strings.Count(output, "AFTER SUITE")).Should(Equal(2))
|
||||
Ω(output).ShouldNot(ContainSubstring("NEVER SEE THIS"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the AfterSuite fails", func() {
|
||||
BeforeEach(func() {
|
||||
pathToTest = tmpPath("suite_setup")
|
||||
copyIn("failing_after_suite", pathToTest)
|
||||
})
|
||||
|
||||
It("should run the BeforeSuite once, none of the tests, but it should run the AfterSuite", func() {
|
||||
session := startGinkgo(pathToTest, "--noColor")
|
||||
Eventually(session).Should(gexec.Exit(1))
|
||||
output := string(session.Out.Contents())
|
||||
|
||||
Ω(strings.Count(output, "BEFORE SUITE")).Should(Equal(1))
|
||||
Ω(strings.Count(output, "AFTER SUITE")).Should(Equal(1))
|
||||
Ω(strings.Count(output, "Test Panicked")).Should(Equal(1))
|
||||
Ω(strings.Count(output, "A TEST")).Should(Equal(2))
|
||||
})
|
||||
|
||||
It("should run the BeforeSuite once per parallel node, none of the tests, but it should run the AfterSuite for each node", func() {
|
||||
session := startGinkgo(pathToTest, "--noColor", "--nodes=2")
|
||||
Eventually(session).Should(gexec.Exit(1))
|
||||
output := string(session.Out.Contents())
|
||||
|
||||
Ω(strings.Count(output, "BEFORE SUITE")).Should(Equal(2))
|
||||
Ω(strings.Count(output, "AFTER SUITE")).Should(Equal(2))
|
||||
Ω(strings.Count(output, "Test Panicked")).Should(Equal(2))
|
||||
Ω(strings.Count(output, "A TEST")).Should(Equal(2))
|
||||
})
|
||||
})
|
||||
|
||||
Context("With passing synchronized before and after suites", func() {
|
||||
BeforeEach(func() {
|
||||
pathToTest = tmpPath("suite_setup")
|
||||
copyIn("synchronized_setup_tests", pathToTest)
|
||||
})
|
||||
|
||||
Context("when run with one node", func() {
|
||||
It("should do all the work on that one node", func() {
|
||||
session := startGinkgo(pathToTest, "--noColor")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := string(session.Out.Contents())
|
||||
|
||||
Ω(output).Should(ContainSubstring("BEFORE_A_1\nBEFORE_B_1: DATA"))
|
||||
Ω(output).Should(ContainSubstring("AFTER_A_1\nAFTER_B_1"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when run across multiple nodes", func() {
|
||||
It("should run the first BeforeSuite function (BEFORE_A) on node 1, the second (BEFORE_B) on all the nodes, the first AfterSuite (AFTER_A) on all the nodes, and then the second (AFTER_B) on Node 1 *after* everything else is finished", func() {
|
||||
session := startGinkgo(pathToTest, "--noColor", "--nodes=3")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := string(session.Out.Contents())
|
||||
|
||||
Ω(output).Should(ContainSubstring("BEFORE_A_1"))
|
||||
Ω(output).Should(ContainSubstring("BEFORE_B_1: DATA"))
|
||||
Ω(output).Should(ContainSubstring("BEFORE_B_2: DATA"))
|
||||
Ω(output).Should(ContainSubstring("BEFORE_B_3: DATA"))
|
||||
|
||||
Ω(output).ShouldNot(ContainSubstring("BEFORE_A_2"))
|
||||
Ω(output).ShouldNot(ContainSubstring("BEFORE_A_3"))
|
||||
|
||||
Ω(output).Should(ContainSubstring("AFTER_A_1"))
|
||||
Ω(output).Should(ContainSubstring("AFTER_A_2"))
|
||||
Ω(output).Should(ContainSubstring("AFTER_A_3"))
|
||||
Ω(output).Should(ContainSubstring("AFTER_B_1"))
|
||||
|
||||
Ω(output).ShouldNot(ContainSubstring("AFTER_B_2"))
|
||||
Ω(output).ShouldNot(ContainSubstring("AFTER_B_3"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when streaming across multiple nodes", func() {
|
||||
It("should run the first BeforeSuite function (BEFORE_A) on node 1, the second (BEFORE_B) on all the nodes, the first AfterSuite (AFTER_A) on all the nodes, and then the second (AFTER_B) on Node 1 *after* everything else is finished", func() {
|
||||
session := startGinkgo(pathToTest, "--noColor", "--nodes=3", "--stream")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := string(session.Out.Contents())
|
||||
|
||||
Ω(output).Should(ContainSubstring("[1] BEFORE_A_1"))
|
||||
Ω(output).Should(ContainSubstring("[1] BEFORE_B_1: DATA"))
|
||||
Ω(output).Should(ContainSubstring("[2] BEFORE_B_2: DATA"))
|
||||
Ω(output).Should(ContainSubstring("[3] BEFORE_B_3: DATA"))
|
||||
|
||||
Ω(output).ShouldNot(ContainSubstring("BEFORE_A_2"))
|
||||
Ω(output).ShouldNot(ContainSubstring("BEFORE_A_3"))
|
||||
|
||||
Ω(output).Should(ContainSubstring("[1] AFTER_A_1"))
|
||||
Ω(output).Should(ContainSubstring("[2] AFTER_A_2"))
|
||||
Ω(output).Should(ContainSubstring("[3] AFTER_A_3"))
|
||||
Ω(output).Should(ContainSubstring("[1] AFTER_B_1"))
|
||||
|
||||
Ω(output).ShouldNot(ContainSubstring("AFTER_B_2"))
|
||||
Ω(output).ShouldNot(ContainSubstring("AFTER_B_3"))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Context("With a failing synchronized before suite", func() {
|
||||
BeforeEach(func() {
|
||||
pathToTest = tmpPath("suite_setup")
|
||||
copyIn("exiting_synchronized_setup_tests", pathToTest)
|
||||
})
|
||||
|
||||
It("should fail and let the user know that node 1 disappeared prematurely", func() {
|
||||
session := startGinkgo(pathToTest, "--noColor", "--nodes=3")
|
||||
Eventually(session).Should(gexec.Exit(1))
|
||||
output := string(session.Out.Contents())
|
||||
|
||||
Ω(output).Should(ContainSubstring("Node 1 disappeared before completing BeforeSuite"))
|
||||
Ω(output).Should(ContainSubstring("Ginkgo timed out waiting for all parallel nodes to end"))
|
||||
})
|
||||
})
|
||||
})
|
||||
27
Godeps/_workspace/src/github.com/onsi/ginkgo/integration/tags_test.go
generated
vendored
27
Godeps/_workspace/src/github.com/onsi/ginkgo/integration/tags_test.go
generated
vendored
@@ -1,27 +0,0 @@
|
||||
package integration_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
"github.com/onsi/gomega/gexec"
|
||||
)
|
||||
|
||||
var _ = Describe("Tags", func() {
|
||||
var pathToTest string
|
||||
BeforeEach(func() {
|
||||
pathToTest = tmpPath("tags")
|
||||
copyIn("tags_tests", pathToTest)
|
||||
})
|
||||
|
||||
It("should honor the passed in -tags flag", func() {
|
||||
session := startGinkgo(pathToTest, "--noColor")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := string(session.Out.Contents())
|
||||
Ω(output).Should(ContainSubstring("Ran 1 of 1 Specs"))
|
||||
|
||||
session = startGinkgo(pathToTest, "--noColor", "-tags=complex_tests")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output = string(session.Out.Contents())
|
||||
Ω(output).Should(ContainSubstring("Ran 3 of 3 Specs"))
|
||||
})
|
||||
})
|
||||
80
Godeps/_workspace/src/github.com/onsi/ginkgo/integration/verbose_and_succinct_test.go
generated
vendored
80
Godeps/_workspace/src/github.com/onsi/ginkgo/integration/verbose_and_succinct_test.go
generated
vendored
@@ -1,80 +0,0 @@
|
||||
package integration_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
"github.com/onsi/gomega/gexec"
|
||||
)
|
||||
|
||||
var _ = Describe("Verbose And Succinct Mode", func() {
|
||||
var pathToTest string
|
||||
var otherPathToTest string
|
||||
|
||||
Context("when running one package", func() {
|
||||
BeforeEach(func() {
|
||||
pathToTest = tmpPath("ginkgo")
|
||||
copyIn("passing_ginkgo_tests", pathToTest)
|
||||
})
|
||||
|
||||
It("should default to non-succinct mode", func() {
|
||||
session := startGinkgo(pathToTest, "--noColor")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := session.Out.Contents()
|
||||
|
||||
Ω(output).Should(ContainSubstring("Running Suite: Passing_ginkgo_tests Suite"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when running more than one package", func() {
|
||||
BeforeEach(func() {
|
||||
pathToTest = tmpPath("ginkgo")
|
||||
copyIn("passing_ginkgo_tests", pathToTest)
|
||||
otherPathToTest = tmpPath("more_ginkgo")
|
||||
copyIn("more_ginkgo_tests", otherPathToTest)
|
||||
})
|
||||
|
||||
Context("with no flags set", func() {
|
||||
It("should default to succinct mode", func() {
|
||||
session := startGinkgo(pathToTest, "--noColor", pathToTest, otherPathToTest)
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := session.Out.Contents()
|
||||
|
||||
Ω(output).Should(ContainSubstring("] Passing_ginkgo_tests Suite - 4/4 specs •••• SUCCESS!"))
|
||||
Ω(output).Should(ContainSubstring("] More_ginkgo_tests Suite - 2/2 specs •• SUCCESS!"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("with --succinct=false", func() {
|
||||
It("should not be in succinct mode", func() {
|
||||
session := startGinkgo(pathToTest, "--noColor", "--succinct=false", pathToTest, otherPathToTest)
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := session.Out.Contents()
|
||||
|
||||
Ω(output).Should(ContainSubstring("Running Suite: Passing_ginkgo_tests Suite"))
|
||||
Ω(output).Should(ContainSubstring("Running Suite: More_ginkgo_tests Suite"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("with -v", func() {
|
||||
It("should not be in succinct mode, but should be verbose", func() {
|
||||
session := startGinkgo(pathToTest, "--noColor", "-v", pathToTest, otherPathToTest)
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := session.Out.Contents()
|
||||
|
||||
Ω(output).Should(ContainSubstring("Running Suite: Passing_ginkgo_tests Suite"))
|
||||
Ω(output).Should(ContainSubstring("Running Suite: More_ginkgo_tests Suite"))
|
||||
Ω(output).Should(ContainSubstring("should proxy strings"))
|
||||
Ω(output).Should(ContainSubstring("should always pass"))
|
||||
})
|
||||
|
||||
It("should emit output from Bys", func() {
|
||||
session := startGinkgo(pathToTest, "--noColor", "-v", pathToTest)
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
output := session.Out.Contents()
|
||||
|
||||
Ω(output).Should(ContainSubstring("emitting one By"))
|
||||
Ω(output).Should(ContainSubstring("emitting another By"))
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
239
Godeps/_workspace/src/github.com/onsi/ginkgo/integration/watch_test.go
generated
vendored
239
Godeps/_workspace/src/github.com/onsi/ginkgo/integration/watch_test.go
generated
vendored
@@ -1,239 +0,0 @@
|
||||
package integration_test
|
||||
|
||||
import (
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"time"
|
||||
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
"github.com/onsi/gomega/gbytes"
|
||||
"github.com/onsi/gomega/gexec"
|
||||
)
|
||||
|
||||
var _ = Describe("Watch", func() {
|
||||
var rootPath string
|
||||
var pathA string
|
||||
var pathB string
|
||||
var pathC string
|
||||
var session *gexec.Session
|
||||
|
||||
BeforeEach(func() {
|
||||
rootPath = tmpPath("root")
|
||||
pathA = filepath.Join(rootPath, "src", "github.com", "onsi", "A")
|
||||
pathB = filepath.Join(rootPath, "src", "github.com", "onsi", "B")
|
||||
pathC = filepath.Join(rootPath, "src", "github.com", "onsi", "C")
|
||||
|
||||
err := os.MkdirAll(pathA, 0700)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
err = os.MkdirAll(pathB, 0700)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
err = os.MkdirAll(pathC, 0700)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
copyIn(filepath.Join("watch_fixtures", "A"), pathA)
|
||||
copyIn(filepath.Join("watch_fixtures", "B"), pathB)
|
||||
copyIn(filepath.Join("watch_fixtures", "C"), pathC)
|
||||
})
|
||||
|
||||
startGinkgoWithGopath := func(args ...string) *gexec.Session {
|
||||
cmd := ginkgoCommand(rootPath, args...)
|
||||
cmd.Env = append([]string{"GOPATH=" + rootPath + ":" + os.Getenv("GOPATH")}, os.Environ()...)
|
||||
session, err := gexec.Start(cmd, GinkgoWriter, GinkgoWriter)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
return session
|
||||
}
|
||||
|
||||
modifyFile := func(path string) {
|
||||
time.Sleep(time.Second)
|
||||
content, err := ioutil.ReadFile(path)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
content = append(content, []byte("//")...)
|
||||
err = ioutil.WriteFile(path, content, 0666)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
}
|
||||
|
||||
modifyCode := func(pkgToModify string) {
|
||||
modifyFile(filepath.Join(rootPath, "src", "github.com", "onsi", pkgToModify, pkgToModify+".go"))
|
||||
}
|
||||
|
||||
modifyTest := func(pkgToModify string) {
|
||||
modifyFile(filepath.Join(rootPath, "src", "github.com", "onsi", pkgToModify, pkgToModify+"_test.go"))
|
||||
}
|
||||
|
||||
AfterEach(func() {
|
||||
if session != nil {
|
||||
session.Kill().Wait()
|
||||
}
|
||||
})
|
||||
|
||||
It("should be set up correctly", func() {
|
||||
session = startGinkgoWithGopath("-r")
|
||||
Eventually(session).Should(gexec.Exit(0))
|
||||
Ω(session.Out.Contents()).Should(ContainSubstring("A Suite"))
|
||||
Ω(session.Out.Contents()).Should(ContainSubstring("B Suite"))
|
||||
Ω(session.Out.Contents()).Should(ContainSubstring("C Suite"))
|
||||
Ω(session.Out.Contents()).Should(ContainSubstring("Ginkgo ran 3 suites"))
|
||||
})
|
||||
|
||||
Context("when watching just one test suite", func() {
|
||||
It("should immediately run, and should rerun when the test suite changes", func() {
|
||||
session = startGinkgoWithGopath("watch", "-succinct", pathA)
|
||||
Eventually(session).Should(gbytes.Say("A Suite"))
|
||||
modifyCode("A")
|
||||
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||
Eventually(session).Should(gbytes.Say("A Suite"))
|
||||
session.Kill().Wait()
|
||||
})
|
||||
})
|
||||
|
||||
Context("when watching several test suites", func() {
|
||||
It("should not immediately run, but should rerun a test when its code changes", func() {
|
||||
session = startGinkgoWithGopath("watch", "-succinct", "-r")
|
||||
Eventually(session).Should(gbytes.Say("Identified 3 test suites"))
|
||||
Consistently(session).ShouldNot(gbytes.Say("A Suite|B Suite|C Suite"))
|
||||
modifyCode("A")
|
||||
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||
Eventually(session).Should(gbytes.Say("A Suite"))
|
||||
Consistently(session).ShouldNot(gbytes.Say("B Suite|C Suite"))
|
||||
session.Kill().Wait()
|
||||
})
|
||||
})
|
||||
|
||||
Describe("watching dependencies", func() {
|
||||
Context("with a depth of 2", func() {
|
||||
It("should watch down to that depth", func() {
|
||||
session = startGinkgoWithGopath("watch", "-succinct", "-r", "-depth=2")
|
||||
Eventually(session).Should(gbytes.Say("Identified 3 test suites"))
|
||||
Eventually(session).Should(gbytes.Say(`A \[2 dependencies\]`))
|
||||
Eventually(session).Should(gbytes.Say(`B \[1 dependency\]`))
|
||||
Eventually(session).Should(gbytes.Say(`C \[0 dependencies\]`))
|
||||
|
||||
modifyCode("A")
|
||||
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||
Eventually(session).Should(gbytes.Say("A Suite"))
|
||||
Consistently(session).ShouldNot(gbytes.Say("B Suite|C Suite"))
|
||||
|
||||
modifyCode("B")
|
||||
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||
Eventually(session).Should(gbytes.Say("B Suite"))
|
||||
Eventually(session).Should(gbytes.Say("A Suite"))
|
||||
Consistently(session).ShouldNot(gbytes.Say("C Suite"))
|
||||
|
||||
modifyCode("C")
|
||||
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||
Eventually(session).Should(gbytes.Say("C Suite"))
|
||||
Eventually(session).Should(gbytes.Say("B Suite"))
|
||||
Eventually(session).Should(gbytes.Say("A Suite"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("with a depth of 1", func() {
|
||||
It("should watch down to that depth", func() {
|
||||
session = startGinkgoWithGopath("watch", "-succinct", "-r", "-depth=1")
|
||||
Eventually(session).Should(gbytes.Say("Identified 3 test suites"))
|
||||
Eventually(session).Should(gbytes.Say(`A \[1 dependency\]`))
|
||||
Eventually(session).Should(gbytes.Say(`B \[1 dependency\]`))
|
||||
Eventually(session).Should(gbytes.Say(`C \[0 dependencies\]`))
|
||||
|
||||
modifyCode("A")
|
||||
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||
Eventually(session).Should(gbytes.Say("A Suite"))
|
||||
Consistently(session).ShouldNot(gbytes.Say("B Suite|C Suite"))
|
||||
|
||||
modifyCode("B")
|
||||
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||
Eventually(session).Should(gbytes.Say("B Suite"))
|
||||
Eventually(session).Should(gbytes.Say("A Suite"))
|
||||
Consistently(session).ShouldNot(gbytes.Say("C Suite"))
|
||||
|
||||
modifyCode("C")
|
||||
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||
Eventually(session).Should(gbytes.Say("C Suite"))
|
||||
Eventually(session).Should(gbytes.Say("B Suite"))
|
||||
Consistently(session).ShouldNot(gbytes.Say("A Suite"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("with a depth of 0", func() {
|
||||
It("should not watch any dependencies", func() {
|
||||
session = startGinkgoWithGopath("watch", "-succinct", "-r", "-depth=0")
|
||||
Eventually(session).Should(gbytes.Say("Identified 3 test suites"))
|
||||
Eventually(session).Should(gbytes.Say(`A \[0 dependencies\]`))
|
||||
Eventually(session).Should(gbytes.Say(`B \[0 dependencies\]`))
|
||||
Eventually(session).Should(gbytes.Say(`C \[0 dependencies\]`))
|
||||
|
||||
modifyCode("A")
|
||||
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||
Eventually(session).Should(gbytes.Say("A Suite"))
|
||||
Consistently(session).ShouldNot(gbytes.Say("B Suite|C Suite"))
|
||||
|
||||
modifyCode("B")
|
||||
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||
Eventually(session).Should(gbytes.Say("B Suite"))
|
||||
Consistently(session).ShouldNot(gbytes.Say("A Suite|C Suite"))
|
||||
|
||||
modifyCode("C")
|
||||
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||
Eventually(session).Should(gbytes.Say("C Suite"))
|
||||
Consistently(session).ShouldNot(gbytes.Say("A Suite|B Suite"))
|
||||
})
|
||||
})
|
||||
|
||||
It("should not trigger dependents when tests are changed", func() {
|
||||
session = startGinkgoWithGopath("watch", "-succinct", "-r", "-depth=2")
|
||||
Eventually(session).Should(gbytes.Say("Identified 3 test suites"))
|
||||
Eventually(session).Should(gbytes.Say(`A \[2 dependencies\]`))
|
||||
Eventually(session).Should(gbytes.Say(`B \[1 dependency\]`))
|
||||
Eventually(session).Should(gbytes.Say(`C \[0 dependencies\]`))
|
||||
|
||||
modifyTest("A")
|
||||
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||
Eventually(session).Should(gbytes.Say("A Suite"))
|
||||
Consistently(session).ShouldNot(gbytes.Say("B Suite|C Suite"))
|
||||
|
||||
modifyTest("B")
|
||||
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||
Eventually(session).Should(gbytes.Say("B Suite"))
|
||||
Consistently(session).ShouldNot(gbytes.Say("A Suite|C Suite"))
|
||||
|
||||
modifyTest("C")
|
||||
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||
Eventually(session).Should(gbytes.Say("C Suite"))
|
||||
Consistently(session).ShouldNot(gbytes.Say("A Suite|B Suite"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("when new test suite is added", func() {
|
||||
It("should start monitoring that test suite", func() {
|
||||
session = startGinkgoWithGopath("watch", "-succinct", "-r")
|
||||
|
||||
Eventually(session).Should(gbytes.Say("Watching 3 suites"))
|
||||
|
||||
pathD := filepath.Join(rootPath, "src", "github.com", "onsi", "D")
|
||||
|
||||
err := os.MkdirAll(pathD, 0700)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
copyIn(filepath.Join("watch_fixtures", "D"), pathD)
|
||||
|
||||
Eventually(session).Should(gbytes.Say("Detected 1 new suite"))
|
||||
Eventually(session).Should(gbytes.Say(`D \[1 dependency\]`))
|
||||
Eventually(session).Should(gbytes.Say("D Suite"))
|
||||
|
||||
modifyCode("D")
|
||||
|
||||
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||
Eventually(session).Should(gbytes.Say("D Suite"))
|
||||
|
||||
modifyCode("C")
|
||||
|
||||
Eventually(session).Should(gbytes.Say("Detected changes in"))
|
||||
Eventually(session).Should(gbytes.Say("C Suite"))
|
||||
Eventually(session).Should(gbytes.Say("D Suite"))
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,13 +0,0 @@
|
||||
package codelocation_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestCodelocation(t *testing.T) {
|
||||
RegisterFailHandler(Fail)
|
||||
RunSpecs(t, "CodeLocation Suite")
|
||||
}
|
||||
79
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/codelocation/code_location_test.go
generated
vendored
79
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/codelocation/code_location_test.go
generated
vendored
@@ -1,79 +0,0 @@
|
||||
package codelocation_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
"github.com/onsi/ginkgo/internal/codelocation"
|
||||
"github.com/onsi/ginkgo/types"
|
||||
. "github.com/onsi/gomega"
|
||||
"runtime"
|
||||
)
|
||||
|
||||
var _ = Describe("CodeLocation", func() {
|
||||
var (
|
||||
codeLocation types.CodeLocation
|
||||
expectedFileName string
|
||||
expectedLineNumber int
|
||||
)
|
||||
|
||||
caller0 := func() {
|
||||
codeLocation = codelocation.New(1)
|
||||
}
|
||||
|
||||
caller1 := func() {
|
||||
_, expectedFileName, expectedLineNumber, _ = runtime.Caller(0)
|
||||
expectedLineNumber += 2
|
||||
caller0()
|
||||
}
|
||||
|
||||
BeforeEach(func() {
|
||||
caller1()
|
||||
})
|
||||
|
||||
It("should use the passed in skip parameter to pick out the correct file & line number", func() {
|
||||
Ω(codeLocation.FileName).Should(Equal(expectedFileName))
|
||||
Ω(codeLocation.LineNumber).Should(Equal(expectedLineNumber))
|
||||
})
|
||||
|
||||
Describe("stringer behavior", func() {
|
||||
It("should stringify nicely", func() {
|
||||
Ω(codeLocation.String()).Should(ContainSubstring("code_location_test.go:%d", expectedLineNumber))
|
||||
})
|
||||
})
|
||||
|
||||
//There's no better way than to test this private method as it
|
||||
//goes out of its way to prune out ginkgo related code in the stack trace
|
||||
Describe("PruneStack", func() {
|
||||
It("should remove any references to ginkgo and pkg/testing and pkg/runtime", func() {
|
||||
input := `/Skip/me
|
||||
Skip: skip()
|
||||
/Skip/me
|
||||
Skip: skip()
|
||||
/Users/whoever/gospace/src/github.com/onsi/ginkgo/whatever.go:10 (0x12314)
|
||||
Something: Func()
|
||||
/Users/whoever/gospace/src/github.com/onsi/ginkgo/whatever_else.go:10 (0x12314)
|
||||
SomethingInternalToGinkgo: Func()
|
||||
/usr/goroot/pkg/strings/oops.go:10 (0x12341)
|
||||
Oops: BlowUp()
|
||||
/Users/whoever/gospace/src/mycode/code.go:10 (0x12341)
|
||||
MyCode: Func()
|
||||
/Users/whoever/gospace/src/mycode/code_test.go:10 (0x12341)
|
||||
MyCodeTest: Func()
|
||||
/Users/whoever/gospace/src/mycode/code_suite_test.go:12 (0x37f08)
|
||||
TestFoo: RunSpecs(t, "Foo Suite")
|
||||
/usr/goroot/pkg/testing/testing.go:12 (0x37f08)
|
||||
TestingT: Blah()
|
||||
/usr/goroot/pkg/runtime/runtime.go:12 (0x37f08)
|
||||
Something: Func()
|
||||
`
|
||||
prunedStack := codelocation.PruneStack(input, 1)
|
||||
Ω(prunedStack).Should(Equal(`/usr/goroot/pkg/strings/oops.go:10 (0x12341)
|
||||
Oops: BlowUp()
|
||||
/Users/whoever/gospace/src/mycode/code.go:10 (0x12341)
|
||||
MyCode: Func()
|
||||
/Users/whoever/gospace/src/mycode/code_test.go:10 (0x12341)
|
||||
MyCodeTest: Func()
|
||||
/Users/whoever/gospace/src/mycode/code_suite_test.go:12 (0x37f08)
|
||||
TestFoo: RunSpecs(t, "Foo Suite")`))
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,13 +0,0 @@
|
||||
package containernode_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestContainernode(t *testing.T) {
|
||||
RegisterFailHandler(Fail)
|
||||
RunSpecs(t, "Containernode Suite")
|
||||
}
|
||||
212
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/containernode/container_node_test.go
generated
vendored
212
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/containernode/container_node_test.go
generated
vendored
@@ -1,212 +0,0 @@
|
||||
package containernode_test
|
||||
|
||||
import (
|
||||
"github.com/onsi/ginkgo/internal/leafnodes"
|
||||
"math/rand"
|
||||
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
|
||||
"github.com/onsi/ginkgo/internal/codelocation"
|
||||
. "github.com/onsi/ginkgo/internal/containernode"
|
||||
"github.com/onsi/ginkgo/types"
|
||||
)
|
||||
|
||||
var _ = Describe("Container Node", func() {
|
||||
var (
|
||||
codeLocation types.CodeLocation
|
||||
container *ContainerNode
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
codeLocation = codelocation.New(0)
|
||||
container = New("description text", types.FlagTypeFocused, codeLocation)
|
||||
})
|
||||
|
||||
Describe("creating a container node", func() {
|
||||
It("can answer questions about itself", func() {
|
||||
Ω(container.Text()).Should(Equal("description text"))
|
||||
Ω(container.Flag()).Should(Equal(types.FlagTypeFocused))
|
||||
Ω(container.CodeLocation()).Should(Equal(codeLocation))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("pushing setup nodes", func() {
|
||||
It("can append setup nodes of various types and fetch them by type", func() {
|
||||
befA := leafnodes.NewBeforeEachNode(func() {}, codelocation.New(0), 0, nil, 0)
|
||||
befB := leafnodes.NewBeforeEachNode(func() {}, codelocation.New(0), 0, nil, 0)
|
||||
aftA := leafnodes.NewAfterEachNode(func() {}, codelocation.New(0), 0, nil, 0)
|
||||
aftB := leafnodes.NewAfterEachNode(func() {}, codelocation.New(0), 0, nil, 0)
|
||||
jusBefA := leafnodes.NewJustBeforeEachNode(func() {}, codelocation.New(0), 0, nil, 0)
|
||||
jusBefB := leafnodes.NewJustBeforeEachNode(func() {}, codelocation.New(0), 0, nil, 0)
|
||||
|
||||
container.PushSetupNode(befA)
|
||||
container.PushSetupNode(befB)
|
||||
container.PushSetupNode(aftA)
|
||||
container.PushSetupNode(aftB)
|
||||
container.PushSetupNode(jusBefA)
|
||||
container.PushSetupNode(jusBefB)
|
||||
|
||||
subject := leafnodes.NewItNode("subject", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0)
|
||||
container.PushSubjectNode(subject)
|
||||
|
||||
Ω(container.SetupNodesOfType(types.SpecComponentTypeBeforeEach)).Should(Equal([]leafnodes.BasicNode{befA, befB}))
|
||||
Ω(container.SetupNodesOfType(types.SpecComponentTypeAfterEach)).Should(Equal([]leafnodes.BasicNode{aftA, aftB}))
|
||||
Ω(container.SetupNodesOfType(types.SpecComponentTypeJustBeforeEach)).Should(Equal([]leafnodes.BasicNode{jusBefA, jusBefB}))
|
||||
Ω(container.SetupNodesOfType(types.SpecComponentTypeIt)).Should(BeEmpty()) //subjects are not setup nodes
|
||||
})
|
||||
})
|
||||
|
||||
Context("With appended containers and subject nodes", func() {
|
||||
var (
|
||||
itA, itB, innerItA, innerItB leafnodes.SubjectNode
|
||||
innerContainer *ContainerNode
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
itA = leafnodes.NewItNode("Banana", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0)
|
||||
itB = leafnodes.NewItNode("Apple", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0)
|
||||
|
||||
innerItA = leafnodes.NewItNode("inner A", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0)
|
||||
innerItB = leafnodes.NewItNode("inner B", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0)
|
||||
|
||||
innerContainer = New("Orange", types.FlagTypeNone, codelocation.New(0))
|
||||
|
||||
container.PushSubjectNode(itA)
|
||||
container.PushContainerNode(innerContainer)
|
||||
innerContainer.PushSubjectNode(innerItA)
|
||||
innerContainer.PushSubjectNode(innerItB)
|
||||
container.PushSubjectNode(itB)
|
||||
})
|
||||
|
||||
Describe("Collating", func() {
|
||||
It("should return a collated set of containers and subject nodes in the correct order", func() {
|
||||
collated := container.Collate()
|
||||
Ω(collated).Should(HaveLen(4))
|
||||
|
||||
Ω(collated[0]).Should(Equal(CollatedNodes{
|
||||
Containers: []*ContainerNode{container},
|
||||
Subject: itA,
|
||||
}))
|
||||
|
||||
Ω(collated[1]).Should(Equal(CollatedNodes{
|
||||
Containers: []*ContainerNode{container, innerContainer},
|
||||
Subject: innerItA,
|
||||
}))
|
||||
|
||||
Ω(collated[2]).Should(Equal(CollatedNodes{
|
||||
Containers: []*ContainerNode{container, innerContainer},
|
||||
Subject: innerItB,
|
||||
}))
|
||||
|
||||
Ω(collated[3]).Should(Equal(CollatedNodes{
|
||||
Containers: []*ContainerNode{container},
|
||||
Subject: itB,
|
||||
}))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Backpropagating Programmatic Focus", func() {
|
||||
//This allows inner focused specs to override the focus of outer focussed
|
||||
//specs and more closely maps to what a developer wants to happen
|
||||
//when debugging a test suite
|
||||
|
||||
Context("when a parent is focused *and* an inner subject is focused", func() {
|
||||
BeforeEach(func() {
|
||||
container = New("description text", types.FlagTypeFocused, codeLocation)
|
||||
itA = leafnodes.NewItNode("A", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0)
|
||||
container.PushSubjectNode(itA)
|
||||
|
||||
innerContainer = New("Orange", types.FlagTypeNone, codelocation.New(0))
|
||||
container.PushContainerNode(innerContainer)
|
||||
innerItA = leafnodes.NewItNode("inner A", func() {}, types.FlagTypeFocused, codelocation.New(0), 0, nil, 0)
|
||||
innerContainer.PushSubjectNode(innerItA)
|
||||
})
|
||||
|
||||
It("should unfocus the parent", func() {
|
||||
container.BackPropagateProgrammaticFocus()
|
||||
|
||||
Ω(container.Flag()).Should(Equal(types.FlagTypeNone))
|
||||
Ω(itA.Flag()).Should(Equal(types.FlagTypeNone))
|
||||
Ω(innerContainer.Flag()).Should(Equal(types.FlagTypeNone))
|
||||
Ω(innerItA.Flag()).Should(Equal(types.FlagTypeFocused))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when a parent is focused *and* an inner container is focused", func() {
|
||||
BeforeEach(func() {
|
||||
container = New("description text", types.FlagTypeFocused, codeLocation)
|
||||
itA = leafnodes.NewItNode("A", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0)
|
||||
container.PushSubjectNode(itA)
|
||||
|
||||
innerContainer = New("Orange", types.FlagTypeFocused, codelocation.New(0))
|
||||
container.PushContainerNode(innerContainer)
|
||||
innerItA = leafnodes.NewItNode("inner A", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0)
|
||||
innerContainer.PushSubjectNode(innerItA)
|
||||
})
|
||||
|
||||
It("should unfocus the parent", func() {
|
||||
container.BackPropagateProgrammaticFocus()
|
||||
|
||||
Ω(container.Flag()).Should(Equal(types.FlagTypeNone))
|
||||
Ω(itA.Flag()).Should(Equal(types.FlagTypeNone))
|
||||
Ω(innerContainer.Flag()).Should(Equal(types.FlagTypeFocused))
|
||||
Ω(innerItA.Flag()).Should(Equal(types.FlagTypeNone))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when a parent is pending and a child is focused", func() {
|
||||
BeforeEach(func() {
|
||||
container = New("description text", types.FlagTypeFocused, codeLocation)
|
||||
itA = leafnodes.NewItNode("A", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0)
|
||||
container.PushSubjectNode(itA)
|
||||
|
||||
innerContainer = New("Orange", types.FlagTypePending, codelocation.New(0))
|
||||
container.PushContainerNode(innerContainer)
|
||||
innerItA = leafnodes.NewItNode("inner A", func() {}, types.FlagTypeFocused, codelocation.New(0), 0, nil, 0)
|
||||
innerContainer.PushSubjectNode(innerItA)
|
||||
})
|
||||
|
||||
It("should not do anything", func() {
|
||||
container.BackPropagateProgrammaticFocus()
|
||||
|
||||
Ω(container.Flag()).Should(Equal(types.FlagTypeFocused))
|
||||
Ω(itA.Flag()).Should(Equal(types.FlagTypeNone))
|
||||
Ω(innerContainer.Flag()).Should(Equal(types.FlagTypePending))
|
||||
Ω(innerItA.Flag()).Should(Equal(types.FlagTypeFocused))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Shuffling", func() {
|
||||
var unshuffledCollation []CollatedNodes
|
||||
BeforeEach(func() {
|
||||
unshuffledCollation = container.Collate()
|
||||
|
||||
r := rand.New(rand.NewSource(17))
|
||||
container.Shuffle(r)
|
||||
})
|
||||
|
||||
It("should sort, and then shuffle, the top level contents of the container", func() {
|
||||
shuffledCollation := container.Collate()
|
||||
Ω(shuffledCollation).Should(HaveLen(len(unshuffledCollation)))
|
||||
Ω(shuffledCollation).ShouldNot(Equal(unshuffledCollation))
|
||||
|
||||
for _, entry := range unshuffledCollation {
|
||||
Ω(shuffledCollation).Should(ContainElement(entry))
|
||||
}
|
||||
|
||||
innerAIndex, innerBIndex := 0, 0
|
||||
for i, entry := range shuffledCollation {
|
||||
if entry.Subject == innerItA {
|
||||
innerAIndex = i
|
||||
} else if entry.Subject == innerItB {
|
||||
innerBIndex = i
|
||||
}
|
||||
}
|
||||
|
||||
Ω(innerAIndex).Should(Equal(innerBIndex - 1))
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
13
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/failer/failer_suite_test.go
generated
vendored
13
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/failer/failer_suite_test.go
generated
vendored
@@ -1,13 +0,0 @@
|
||||
package failer_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestFailer(t *testing.T) {
|
||||
RegisterFailHandler(Fail)
|
||||
RunSpecs(t, "Failer Suite")
|
||||
}
|
||||
125
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/failer/failer_test.go
generated
vendored
125
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/failer/failer_test.go
generated
vendored
@@ -1,125 +0,0 @@
|
||||
package failer_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/ginkgo/internal/failer"
|
||||
. "github.com/onsi/gomega"
|
||||
|
||||
"github.com/onsi/ginkgo/internal/codelocation"
|
||||
"github.com/onsi/ginkgo/types"
|
||||
)
|
||||
|
||||
var _ = Describe("Failer", func() {
|
||||
var (
|
||||
failer *Failer
|
||||
codeLocationA types.CodeLocation
|
||||
codeLocationB types.CodeLocation
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
codeLocationA = codelocation.New(0)
|
||||
codeLocationB = codelocation.New(0)
|
||||
failer = New()
|
||||
})
|
||||
|
||||
Context("with no failures", func() {
|
||||
It("should return success when drained", func() {
|
||||
failure, state := failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB)
|
||||
Ω(failure).Should(BeZero())
|
||||
Ω(state).Should(Equal(types.SpecStatePassed))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Fail", func() {
|
||||
It("should handle failures", func() {
|
||||
failer.Fail("something failed", codeLocationA)
|
||||
failure, state := failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB)
|
||||
Ω(failure).Should(Equal(types.SpecFailure{
|
||||
Message: "something failed",
|
||||
Location: codeLocationA,
|
||||
ForwardedPanic: "",
|
||||
ComponentType: types.SpecComponentTypeIt,
|
||||
ComponentIndex: 3,
|
||||
ComponentCodeLocation: codeLocationB,
|
||||
}))
|
||||
Ω(state).Should(Equal(types.SpecStateFailed))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Panic", func() {
|
||||
It("should handle panics", func() {
|
||||
failer.Panic(codeLocationA, "some forwarded panic")
|
||||
failure, state := failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB)
|
||||
Ω(failure).Should(Equal(types.SpecFailure{
|
||||
Message: "Test Panicked",
|
||||
Location: codeLocationA,
|
||||
ForwardedPanic: "some forwarded panic",
|
||||
ComponentType: types.SpecComponentTypeIt,
|
||||
ComponentIndex: 3,
|
||||
ComponentCodeLocation: codeLocationB,
|
||||
}))
|
||||
Ω(state).Should(Equal(types.SpecStatePanicked))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Timeout", func() {
|
||||
It("should handle timeouts", func() {
|
||||
failer.Timeout(codeLocationA)
|
||||
failure, state := failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB)
|
||||
Ω(failure).Should(Equal(types.SpecFailure{
|
||||
Message: "Timed out",
|
||||
Location: codeLocationA,
|
||||
ForwardedPanic: "",
|
||||
ComponentType: types.SpecComponentTypeIt,
|
||||
ComponentIndex: 3,
|
||||
ComponentCodeLocation: codeLocationB,
|
||||
}))
|
||||
Ω(state).Should(Equal(types.SpecStateTimedOut))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when multiple failures are registered", func() {
|
||||
BeforeEach(func() {
|
||||
failer.Fail("something failed", codeLocationA)
|
||||
failer.Fail("something else failed", codeLocationA)
|
||||
})
|
||||
|
||||
It("should only report the first one when drained", func() {
|
||||
failure, state := failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB)
|
||||
|
||||
Ω(failure).Should(Equal(types.SpecFailure{
|
||||
Message: "something failed",
|
||||
Location: codeLocationA,
|
||||
ForwardedPanic: "",
|
||||
ComponentType: types.SpecComponentTypeIt,
|
||||
ComponentIndex: 3,
|
||||
ComponentCodeLocation: codeLocationB,
|
||||
}))
|
||||
Ω(state).Should(Equal(types.SpecStateFailed))
|
||||
})
|
||||
|
||||
It("should report subsequent failures after being drained", func() {
|
||||
failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB)
|
||||
failer.Fail("yet another thing failed", codeLocationA)
|
||||
|
||||
failure, state := failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB)
|
||||
|
||||
Ω(failure).Should(Equal(types.SpecFailure{
|
||||
Message: "yet another thing failed",
|
||||
Location: codeLocationA,
|
||||
ForwardedPanic: "",
|
||||
ComponentType: types.SpecComponentTypeIt,
|
||||
ComponentIndex: 3,
|
||||
ComponentCodeLocation: codeLocationB,
|
||||
}))
|
||||
Ω(state).Should(Equal(types.SpecStateFailed))
|
||||
})
|
||||
|
||||
It("should report sucess on subsequent drains if no errors occur", func() {
|
||||
failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB)
|
||||
failure, state := failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB)
|
||||
Ω(failure).Should(BeZero())
|
||||
Ω(state).Should(Equal(types.SpecStatePassed))
|
||||
})
|
||||
})
|
||||
})
|
||||
22
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/it_node_test.go
generated
vendored
22
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/it_node_test.go
generated
vendored
@@ -1,22 +0,0 @@
|
||||
package leafnodes_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/ginkgo/internal/leafnodes"
|
||||
. "github.com/onsi/gomega"
|
||||
|
||||
"github.com/onsi/ginkgo/internal/codelocation"
|
||||
"github.com/onsi/ginkgo/types"
|
||||
)
|
||||
|
||||
var _ = Describe("It Nodes", func() {
|
||||
It("should report the correct type, text, flag, and code location", func() {
|
||||
codeLocation := codelocation.New(0)
|
||||
it := NewItNode("my it node", func() {}, types.FlagTypeFocused, codeLocation, 0, nil, 3)
|
||||
Ω(it.Type()).Should(Equal(types.SpecComponentTypeIt))
|
||||
Ω(it.Flag()).Should(Equal(types.FlagTypeFocused))
|
||||
Ω(it.Text()).Should(Equal("my it node"))
|
||||
Ω(it.CodeLocation()).Should(Equal(codeLocation))
|
||||
Ω(it.Samples()).Should(Equal(1))
|
||||
})
|
||||
})
|
||||
13
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/leaf_node_suite_test.go
generated
vendored
13
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/leaf_node_suite_test.go
generated
vendored
@@ -1,13 +0,0 @@
|
||||
package leafnodes_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestLeafNode(t *testing.T) {
|
||||
RegisterFailHandler(Fail)
|
||||
RunSpecs(t, "LeafNode Suite")
|
||||
}
|
||||
109
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/measure_node_test.go
generated
vendored
109
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/measure_node_test.go
generated
vendored
@@ -1,109 +0,0 @@
|
||||
package leafnodes_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/ginkgo/internal/leafnodes"
|
||||
. "github.com/onsi/gomega"
|
||||
|
||||
"github.com/onsi/ginkgo/internal/codelocation"
|
||||
Failer "github.com/onsi/ginkgo/internal/failer"
|
||||
"github.com/onsi/ginkgo/types"
|
||||
"time"
|
||||
)
|
||||
|
||||
var _ = Describe("Measure Nodes", func() {
|
||||
It("should report the correct type, text, flag, and code location", func() {
|
||||
codeLocation := codelocation.New(0)
|
||||
measure := NewMeasureNode("my measure node", func(b Benchmarker) {}, types.FlagTypeFocused, codeLocation, 10, nil, 3)
|
||||
Ω(measure.Type()).Should(Equal(types.SpecComponentTypeMeasure))
|
||||
Ω(measure.Flag()).Should(Equal(types.FlagTypeFocused))
|
||||
Ω(measure.Text()).Should(Equal("my measure node"))
|
||||
Ω(measure.CodeLocation()).Should(Equal(codeLocation))
|
||||
Ω(measure.Samples()).Should(Equal(10))
|
||||
})
|
||||
|
||||
Describe("benchmarking", func() {
|
||||
var measure *MeasureNode
|
||||
|
||||
Describe("Value", func() {
|
||||
BeforeEach(func() {
|
||||
measure = NewMeasureNode("the measurement", func(b Benchmarker) {
|
||||
b.RecordValue("foo", 7, "info!")
|
||||
b.RecordValue("foo", 2)
|
||||
b.RecordValue("foo", 3)
|
||||
b.RecordValue("bar", 0.3)
|
||||
b.RecordValue("bar", 0.1)
|
||||
b.RecordValue("bar", 0.5)
|
||||
b.RecordValue("bar", 0.7)
|
||||
}, types.FlagTypeFocused, codelocation.New(0), 1, Failer.New(), 3)
|
||||
Ω(measure.Run()).Should(Equal(types.SpecStatePassed))
|
||||
})
|
||||
|
||||
It("records passed in values and reports on them", func() {
|
||||
report := measure.MeasurementsReport()
|
||||
Ω(report).Should(HaveLen(2))
|
||||
Ω(report["foo"].Name).Should(Equal("foo"))
|
||||
Ω(report["foo"].Info).Should(Equal("info!"))
|
||||
Ω(report["foo"].Order).Should(Equal(0))
|
||||
Ω(report["foo"].SmallestLabel).Should(Equal("Smallest"))
|
||||
Ω(report["foo"].LargestLabel).Should(Equal(" Largest"))
|
||||
Ω(report["foo"].AverageLabel).Should(Equal(" Average"))
|
||||
Ω(report["foo"].Units).Should(Equal(""))
|
||||
Ω(report["foo"].Results).Should(Equal([]float64{7, 2, 3}))
|
||||
Ω(report["foo"].Smallest).Should(BeNumerically("==", 2))
|
||||
Ω(report["foo"].Largest).Should(BeNumerically("==", 7))
|
||||
Ω(report["foo"].Average).Should(BeNumerically("==", 4))
|
||||
Ω(report["foo"].StdDeviation).Should(BeNumerically("~", 2.16, 0.01))
|
||||
|
||||
Ω(report["bar"].Name).Should(Equal("bar"))
|
||||
Ω(report["bar"].Info).Should(BeNil())
|
||||
Ω(report["bar"].SmallestLabel).Should(Equal("Smallest"))
|
||||
Ω(report["bar"].Order).Should(Equal(1))
|
||||
Ω(report["bar"].LargestLabel).Should(Equal(" Largest"))
|
||||
Ω(report["bar"].AverageLabel).Should(Equal(" Average"))
|
||||
Ω(report["bar"].Units).Should(Equal(""))
|
||||
Ω(report["bar"].Results).Should(Equal([]float64{0.3, 0.1, 0.5, 0.7}))
|
||||
Ω(report["bar"].Smallest).Should(BeNumerically("==", 0.1))
|
||||
Ω(report["bar"].Largest).Should(BeNumerically("==", 0.7))
|
||||
Ω(report["bar"].Average).Should(BeNumerically("==", 0.4))
|
||||
Ω(report["bar"].StdDeviation).Should(BeNumerically("~", 0.22, 0.01))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Time", func() {
|
||||
BeforeEach(func() {
|
||||
measure = NewMeasureNode("the measurement", func(b Benchmarker) {
|
||||
b.Time("foo", func() {
|
||||
time.Sleep(100 * time.Millisecond)
|
||||
}, "info!")
|
||||
b.Time("foo", func() {
|
||||
time.Sleep(200 * time.Millisecond)
|
||||
})
|
||||
b.Time("foo", func() {
|
||||
time.Sleep(170 * time.Millisecond)
|
||||
})
|
||||
}, types.FlagTypeFocused, codelocation.New(0), 1, Failer.New(), 3)
|
||||
Ω(measure.Run()).Should(Equal(types.SpecStatePassed))
|
||||
})
|
||||
|
||||
It("records passed in values and reports on them", func() {
|
||||
report := measure.MeasurementsReport()
|
||||
Ω(report).Should(HaveLen(1))
|
||||
Ω(report["foo"].Name).Should(Equal("foo"))
|
||||
Ω(report["foo"].Info).Should(Equal("info!"))
|
||||
Ω(report["foo"].SmallestLabel).Should(Equal("Fastest Time"))
|
||||
Ω(report["foo"].LargestLabel).Should(Equal("Slowest Time"))
|
||||
Ω(report["foo"].AverageLabel).Should(Equal("Average Time"))
|
||||
Ω(report["foo"].Units).Should(Equal("s"))
|
||||
Ω(report["foo"].Results).Should(HaveLen(3))
|
||||
Ω(report["foo"].Results[0]).Should(BeNumerically("~", 0.1, 0.01))
|
||||
Ω(report["foo"].Results[1]).Should(BeNumerically("~", 0.2, 0.01))
|
||||
Ω(report["foo"].Results[2]).Should(BeNumerically("~", 0.17, 0.01))
|
||||
Ω(report["foo"].Smallest).Should(BeNumerically("~", 0.1, 0.01))
|
||||
Ω(report["foo"].Largest).Should(BeNumerically("~", 0.2, 0.01))
|
||||
Ω(report["foo"].Average).Should(BeNumerically("~", 0.16, 0.01))
|
||||
Ω(report["foo"].StdDeviation).Should(BeNumerically("~", 0.04, 0.01))
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
40
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/setup_nodes_test.go
generated
vendored
40
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/setup_nodes_test.go
generated
vendored
@@ -1,40 +0,0 @@
|
||||
package leafnodes_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
"github.com/onsi/ginkgo/types"
|
||||
. "github.com/onsi/gomega"
|
||||
|
||||
. "github.com/onsi/ginkgo/internal/leafnodes"
|
||||
|
||||
"github.com/onsi/ginkgo/internal/codelocation"
|
||||
)
|
||||
|
||||
var _ = Describe("Setup Nodes", func() {
|
||||
Describe("BeforeEachNodes", func() {
|
||||
It("should report the correct type and code location", func() {
|
||||
codeLocation := codelocation.New(0)
|
||||
beforeEach := NewBeforeEachNode(func() {}, codeLocation, 0, nil, 3)
|
||||
Ω(beforeEach.Type()).Should(Equal(types.SpecComponentTypeBeforeEach))
|
||||
Ω(beforeEach.CodeLocation()).Should(Equal(codeLocation))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("AfterEachNodes", func() {
|
||||
It("should report the correct type and code location", func() {
|
||||
codeLocation := codelocation.New(0)
|
||||
afterEach := NewAfterEachNode(func() {}, codeLocation, 0, nil, 3)
|
||||
Ω(afterEach.Type()).Should(Equal(types.SpecComponentTypeAfterEach))
|
||||
Ω(afterEach.CodeLocation()).Should(Equal(codeLocation))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("JustBeforeEachNodes", func() {
|
||||
It("should report the correct type and code location", func() {
|
||||
codeLocation := codelocation.New(0)
|
||||
justBeforeEach := NewJustBeforeEachNode(func() {}, codeLocation, 0, nil, 3)
|
||||
Ω(justBeforeEach.Type()).Should(Equal(types.SpecComponentTypeJustBeforeEach))
|
||||
Ω(justBeforeEach.CodeLocation()).Should(Equal(codeLocation))
|
||||
})
|
||||
})
|
||||
})
|
||||
326
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/shared_runner_test.go
generated
vendored
326
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/shared_runner_test.go
generated
vendored
@@ -1,326 +0,0 @@
|
||||
package leafnodes_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/ginkgo/internal/leafnodes"
|
||||
. "github.com/onsi/gomega"
|
||||
|
||||
"reflect"
|
||||
"runtime"
|
||||
"time"
|
||||
|
||||
"github.com/onsi/ginkgo/internal/codelocation"
|
||||
Failer "github.com/onsi/ginkgo/internal/failer"
|
||||
"github.com/onsi/ginkgo/types"
|
||||
)
|
||||
|
||||
type runnable interface {
|
||||
Run() (outcome types.SpecState, failure types.SpecFailure)
|
||||
CodeLocation() types.CodeLocation
|
||||
}
|
||||
|
||||
func SynchronousSharedRunnerBehaviors(build func(body interface{}, timeout time.Duration, failer *Failer.Failer, componentCodeLocation types.CodeLocation) runnable, componentType types.SpecComponentType, componentIndex int) {
|
||||
var (
|
||||
outcome types.SpecState
|
||||
failure types.SpecFailure
|
||||
|
||||
failer *Failer.Failer
|
||||
|
||||
componentCodeLocation types.CodeLocation
|
||||
innerCodeLocation types.CodeLocation
|
||||
|
||||
didRun bool
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
failer = Failer.New()
|
||||
componentCodeLocation = codelocation.New(0)
|
||||
innerCodeLocation = codelocation.New(0)
|
||||
|
||||
didRun = false
|
||||
})
|
||||
|
||||
Describe("synchronous functions", func() {
|
||||
Context("when the function passes", func() {
|
||||
BeforeEach(func() {
|
||||
outcome, failure = build(func() {
|
||||
didRun = true
|
||||
}, 0, failer, componentCodeLocation).Run()
|
||||
})
|
||||
|
||||
It("should have a succesful outcome", func() {
|
||||
Ω(didRun).Should(BeTrue())
|
||||
|
||||
Ω(outcome).Should(Equal(types.SpecStatePassed))
|
||||
Ω(failure).Should(BeZero())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when a failure occurs", func() {
|
||||
BeforeEach(func() {
|
||||
outcome, failure = build(func() {
|
||||
didRun = true
|
||||
failer.Fail("bam", innerCodeLocation)
|
||||
panic("should not matter")
|
||||
}, 0, failer, componentCodeLocation).Run()
|
||||
})
|
||||
|
||||
It("should return the failure", func() {
|
||||
Ω(didRun).Should(BeTrue())
|
||||
|
||||
Ω(outcome).Should(Equal(types.SpecStateFailed))
|
||||
Ω(failure).Should(Equal(types.SpecFailure{
|
||||
Message: "bam",
|
||||
Location: innerCodeLocation,
|
||||
ForwardedPanic: "",
|
||||
ComponentIndex: componentIndex,
|
||||
ComponentType: componentType,
|
||||
ComponentCodeLocation: componentCodeLocation,
|
||||
}))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when a panic occurs", func() {
|
||||
BeforeEach(func() {
|
||||
outcome, failure = build(func() {
|
||||
didRun = true
|
||||
innerCodeLocation = codelocation.New(0)
|
||||
panic("ack!")
|
||||
}, 0, failer, componentCodeLocation).Run()
|
||||
})
|
||||
|
||||
It("should return the panic", func() {
|
||||
Ω(didRun).Should(BeTrue())
|
||||
|
||||
Ω(outcome).Should(Equal(types.SpecStatePanicked))
|
||||
Ω(failure.ForwardedPanic).Should(Equal("ack!"))
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
func AsynchronousSharedRunnerBehaviors(build func(body interface{}, timeout time.Duration, failer *Failer.Failer, componentCodeLocation types.CodeLocation) runnable, componentType types.SpecComponentType, componentIndex int) {
|
||||
var (
|
||||
outcome types.SpecState
|
||||
failure types.SpecFailure
|
||||
|
||||
failer *Failer.Failer
|
||||
|
||||
componentCodeLocation types.CodeLocation
|
||||
innerCodeLocation types.CodeLocation
|
||||
|
||||
didRun bool
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
failer = Failer.New()
|
||||
componentCodeLocation = codelocation.New(0)
|
||||
innerCodeLocation = codelocation.New(0)
|
||||
|
||||
didRun = false
|
||||
})
|
||||
|
||||
Describe("asynchronous functions", func() {
|
||||
var timeoutDuration time.Duration
|
||||
|
||||
BeforeEach(func() {
|
||||
timeoutDuration = time.Duration(1 * float64(time.Second))
|
||||
})
|
||||
|
||||
Context("when running", func() {
|
||||
It("should run the function as a goroutine, and block until it's done", func() {
|
||||
initialNumberOfGoRoutines := runtime.NumGoroutine()
|
||||
numberOfGoRoutines := 0
|
||||
|
||||
build(func(done Done) {
|
||||
didRun = true
|
||||
numberOfGoRoutines = runtime.NumGoroutine()
|
||||
close(done)
|
||||
}, timeoutDuration, failer, componentCodeLocation).Run()
|
||||
|
||||
Ω(didRun).Should(BeTrue())
|
||||
Ω(numberOfGoRoutines).Should(BeNumerically(">=", initialNumberOfGoRoutines+1))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the function passes", func() {
|
||||
BeforeEach(func() {
|
||||
outcome, failure = build(func(done Done) {
|
||||
didRun = true
|
||||
close(done)
|
||||
}, timeoutDuration, failer, componentCodeLocation).Run()
|
||||
})
|
||||
|
||||
It("should have a succesful outcome", func() {
|
||||
Ω(didRun).Should(BeTrue())
|
||||
Ω(outcome).Should(Equal(types.SpecStatePassed))
|
||||
Ω(failure).Should(BeZero())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the function fails", func() {
|
||||
BeforeEach(func() {
|
||||
outcome, failure = build(func(done Done) {
|
||||
didRun = true
|
||||
failer.Fail("bam", innerCodeLocation)
|
||||
time.Sleep(20 * time.Millisecond)
|
||||
panic("doesn't matter")
|
||||
close(done)
|
||||
}, 10*time.Millisecond, failer, componentCodeLocation).Run()
|
||||
})
|
||||
|
||||
It("should return the failure", func() {
|
||||
Ω(didRun).Should(BeTrue())
|
||||
|
||||
Ω(outcome).Should(Equal(types.SpecStateFailed))
|
||||
Ω(failure).Should(Equal(types.SpecFailure{
|
||||
Message: "bam",
|
||||
Location: innerCodeLocation,
|
||||
ForwardedPanic: "",
|
||||
ComponentIndex: componentIndex,
|
||||
ComponentType: componentType,
|
||||
ComponentCodeLocation: componentCodeLocation,
|
||||
}))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the function times out", func() {
|
||||
var guard chan struct{}
|
||||
|
||||
BeforeEach(func() {
|
||||
guard = make(chan struct{})
|
||||
outcome, failure = build(func(done Done) {
|
||||
didRun = true
|
||||
time.Sleep(20 * time.Millisecond)
|
||||
close(guard)
|
||||
panic("doesn't matter")
|
||||
close(done)
|
||||
}, 10*time.Millisecond, failer, componentCodeLocation).Run()
|
||||
})
|
||||
|
||||
It("should return the timeout", func() {
|
||||
<-guard
|
||||
Ω(didRun).Should(BeTrue())
|
||||
|
||||
Ω(outcome).Should(Equal(types.SpecStateTimedOut))
|
||||
Ω(failure).Should(Equal(types.SpecFailure{
|
||||
Message: "Timed out",
|
||||
Location: componentCodeLocation,
|
||||
ForwardedPanic: "",
|
||||
ComponentIndex: componentIndex,
|
||||
ComponentType: componentType,
|
||||
ComponentCodeLocation: componentCodeLocation,
|
||||
}))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the function panics", func() {
|
||||
BeforeEach(func() {
|
||||
outcome, failure = build(func(done Done) {
|
||||
didRun = true
|
||||
innerCodeLocation = codelocation.New(0)
|
||||
panic("ack!")
|
||||
}, 100*time.Millisecond, failer, componentCodeLocation).Run()
|
||||
})
|
||||
|
||||
It("should return the panic", func() {
|
||||
Ω(didRun).Should(BeTrue())
|
||||
|
||||
Ω(outcome).Should(Equal(types.SpecStatePanicked))
|
||||
Ω(failure.ForwardedPanic).Should(Equal("ack!"))
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
func InvalidSharedRunnerBehaviors(build func(body interface{}, timeout time.Duration, failer *Failer.Failer, componentCodeLocation types.CodeLocation) runnable, componentType types.SpecComponentType) {
|
||||
var (
|
||||
failer *Failer.Failer
|
||||
componentCodeLocation types.CodeLocation
|
||||
innerCodeLocation types.CodeLocation
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
failer = Failer.New()
|
||||
componentCodeLocation = codelocation.New(0)
|
||||
innerCodeLocation = codelocation.New(0)
|
||||
})
|
||||
|
||||
Describe("invalid functions", func() {
|
||||
Context("when passed something that's not a function", func() {
|
||||
It("should panic", func() {
|
||||
Ω(func() {
|
||||
build("not a function", 0, failer, componentCodeLocation)
|
||||
}).Should(Panic())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the function takes the wrong kind of argument", func() {
|
||||
It("should panic", func() {
|
||||
Ω(func() {
|
||||
build(func(oops string) {}, 0, failer, componentCodeLocation)
|
||||
}).Should(Panic())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the function takes more than one argument", func() {
|
||||
It("should panic", func() {
|
||||
Ω(func() {
|
||||
build(func(done Done, oops string) {}, 0, failer, componentCodeLocation)
|
||||
}).Should(Panic())
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
var _ = Describe("Shared RunnableNode behavior", func() {
|
||||
Describe("It Nodes", func() {
|
||||
build := func(body interface{}, timeout time.Duration, failer *Failer.Failer, componentCodeLocation types.CodeLocation) runnable {
|
||||
return NewItNode("", body, types.FlagTypeFocused, componentCodeLocation, timeout, failer, 3)
|
||||
}
|
||||
|
||||
SynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeIt, 3)
|
||||
AsynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeIt, 3)
|
||||
InvalidSharedRunnerBehaviors(build, types.SpecComponentTypeIt)
|
||||
})
|
||||
|
||||
Describe("Measure Nodes", func() {
|
||||
build := func(body interface{}, _ time.Duration, failer *Failer.Failer, componentCodeLocation types.CodeLocation) runnable {
|
||||
return NewMeasureNode("", func(Benchmarker) {
|
||||
reflect.ValueOf(body).Call([]reflect.Value{})
|
||||
}, types.FlagTypeFocused, componentCodeLocation, 10, failer, 3)
|
||||
}
|
||||
|
||||
SynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeMeasure, 3)
|
||||
})
|
||||
|
||||
Describe("BeforeEach Nodes", func() {
|
||||
build := func(body interface{}, timeout time.Duration, failer *Failer.Failer, componentCodeLocation types.CodeLocation) runnable {
|
||||
return NewBeforeEachNode(body, componentCodeLocation, timeout, failer, 3)
|
||||
}
|
||||
|
||||
SynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeBeforeEach, 3)
|
||||
AsynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeBeforeEach, 3)
|
||||
InvalidSharedRunnerBehaviors(build, types.SpecComponentTypeBeforeEach)
|
||||
})
|
||||
|
||||
Describe("AfterEach Nodes", func() {
|
||||
build := func(body interface{}, timeout time.Duration, failer *Failer.Failer, componentCodeLocation types.CodeLocation) runnable {
|
||||
return NewAfterEachNode(body, componentCodeLocation, timeout, failer, 3)
|
||||
}
|
||||
|
||||
SynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeAfterEach, 3)
|
||||
AsynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeAfterEach, 3)
|
||||
InvalidSharedRunnerBehaviors(build, types.SpecComponentTypeAfterEach)
|
||||
})
|
||||
|
||||
Describe("JustBeforeEach Nodes", func() {
|
||||
build := func(body interface{}, timeout time.Duration, failer *Failer.Failer, componentCodeLocation types.CodeLocation) runnable {
|
||||
return NewJustBeforeEachNode(body, componentCodeLocation, timeout, failer, 3)
|
||||
}
|
||||
|
||||
SynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeJustBeforeEach, 3)
|
||||
AsynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeJustBeforeEach, 3)
|
||||
InvalidSharedRunnerBehaviors(build, types.SpecComponentTypeJustBeforeEach)
|
||||
})
|
||||
})
|
||||
230
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/suite_nodes_test.go
generated
vendored
230
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/suite_nodes_test.go
generated
vendored
@@ -1,230 +0,0 @@
|
||||
package leafnodes_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
|
||||
. "github.com/onsi/ginkgo/internal/leafnodes"
|
||||
|
||||
"time"
|
||||
|
||||
"github.com/onsi/ginkgo/internal/codelocation"
|
||||
Failer "github.com/onsi/ginkgo/internal/failer"
|
||||
"github.com/onsi/ginkgo/types"
|
||||
)
|
||||
|
||||
var _ = Describe("SuiteNodes", func() {
|
||||
Describe("BeforeSuite nodes", func() {
|
||||
var befSuite SuiteNode
|
||||
var failer *Failer.Failer
|
||||
var codeLocation types.CodeLocation
|
||||
var innerCodeLocation types.CodeLocation
|
||||
var outcome bool
|
||||
|
||||
BeforeEach(func() {
|
||||
failer = Failer.New()
|
||||
codeLocation = codelocation.New(0)
|
||||
innerCodeLocation = codelocation.New(0)
|
||||
})
|
||||
|
||||
Context("when the body passes", func() {
|
||||
BeforeEach(func() {
|
||||
befSuite = NewBeforeSuiteNode(func() {
|
||||
time.Sleep(10 * time.Millisecond)
|
||||
}, codeLocation, 0, failer)
|
||||
outcome = befSuite.Run(0, 0, "")
|
||||
})
|
||||
|
||||
It("should return true when run and report as passed", func() {
|
||||
Ω(outcome).Should(BeTrue())
|
||||
Ω(befSuite.Passed()).Should(BeTrue())
|
||||
})
|
||||
|
||||
It("should have the correct summary", func() {
|
||||
summary := befSuite.Summary()
|
||||
Ω(summary.ComponentType).Should(Equal(types.SpecComponentTypeBeforeSuite))
|
||||
Ω(summary.CodeLocation).Should(Equal(codeLocation))
|
||||
Ω(summary.State).Should(Equal(types.SpecStatePassed))
|
||||
Ω(summary.RunTime).Should(BeNumerically(">=", 10*time.Millisecond))
|
||||
Ω(summary.Failure).Should(BeZero())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the body fails", func() {
|
||||
BeforeEach(func() {
|
||||
befSuite = NewBeforeSuiteNode(func() {
|
||||
failer.Fail("oops", innerCodeLocation)
|
||||
}, codeLocation, 0, failer)
|
||||
outcome = befSuite.Run(0, 0, "")
|
||||
})
|
||||
|
||||
It("should return false when run and report as failed", func() {
|
||||
Ω(outcome).Should(BeFalse())
|
||||
Ω(befSuite.Passed()).Should(BeFalse())
|
||||
})
|
||||
|
||||
It("should have the correct summary", func() {
|
||||
summary := befSuite.Summary()
|
||||
Ω(summary.State).Should(Equal(types.SpecStateFailed))
|
||||
Ω(summary.Failure.Message).Should(Equal("oops"))
|
||||
Ω(summary.Failure.Location).Should(Equal(innerCodeLocation))
|
||||
Ω(summary.Failure.ForwardedPanic).Should(BeEmpty())
|
||||
Ω(summary.Failure.ComponentIndex).Should(Equal(0))
|
||||
Ω(summary.Failure.ComponentType).Should(Equal(types.SpecComponentTypeBeforeSuite))
|
||||
Ω(summary.Failure.ComponentCodeLocation).Should(Equal(codeLocation))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the body times out", func() {
|
||||
BeforeEach(func() {
|
||||
befSuite = NewBeforeSuiteNode(func(done Done) {
|
||||
}, codeLocation, time.Millisecond, failer)
|
||||
outcome = befSuite.Run(0, 0, "")
|
||||
})
|
||||
|
||||
It("should return false when run and report as failed", func() {
|
||||
Ω(outcome).Should(BeFalse())
|
||||
Ω(befSuite.Passed()).Should(BeFalse())
|
||||
})
|
||||
|
||||
It("should have the correct summary", func() {
|
||||
summary := befSuite.Summary()
|
||||
Ω(summary.State).Should(Equal(types.SpecStateTimedOut))
|
||||
Ω(summary.Failure.ForwardedPanic).Should(BeEmpty())
|
||||
Ω(summary.Failure.ComponentIndex).Should(Equal(0))
|
||||
Ω(summary.Failure.ComponentType).Should(Equal(types.SpecComponentTypeBeforeSuite))
|
||||
Ω(summary.Failure.ComponentCodeLocation).Should(Equal(codeLocation))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the body panics", func() {
|
||||
BeforeEach(func() {
|
||||
befSuite = NewBeforeSuiteNode(func() {
|
||||
panic("bam")
|
||||
}, codeLocation, 0, failer)
|
||||
outcome = befSuite.Run(0, 0, "")
|
||||
})
|
||||
|
||||
It("should return false when run and report as failed", func() {
|
||||
Ω(outcome).Should(BeFalse())
|
||||
Ω(befSuite.Passed()).Should(BeFalse())
|
||||
})
|
||||
|
||||
It("should have the correct summary", func() {
|
||||
summary := befSuite.Summary()
|
||||
Ω(summary.State).Should(Equal(types.SpecStatePanicked))
|
||||
Ω(summary.Failure.ForwardedPanic).Should(Equal("bam"))
|
||||
Ω(summary.Failure.ComponentIndex).Should(Equal(0))
|
||||
Ω(summary.Failure.ComponentType).Should(Equal(types.SpecComponentTypeBeforeSuite))
|
||||
Ω(summary.Failure.ComponentCodeLocation).Should(Equal(codeLocation))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("AfterSuite nodes", func() {
|
||||
var aftSuite SuiteNode
|
||||
var failer *Failer.Failer
|
||||
var codeLocation types.CodeLocation
|
||||
var innerCodeLocation types.CodeLocation
|
||||
var outcome bool
|
||||
|
||||
BeforeEach(func() {
|
||||
failer = Failer.New()
|
||||
codeLocation = codelocation.New(0)
|
||||
innerCodeLocation = codelocation.New(0)
|
||||
})
|
||||
|
||||
Context("when the body passes", func() {
|
||||
BeforeEach(func() {
|
||||
aftSuite = NewAfterSuiteNode(func() {
|
||||
time.Sleep(10 * time.Millisecond)
|
||||
}, codeLocation, 0, failer)
|
||||
outcome = aftSuite.Run(0, 0, "")
|
||||
})
|
||||
|
||||
It("should return true when run and report as passed", func() {
|
||||
Ω(outcome).Should(BeTrue())
|
||||
Ω(aftSuite.Passed()).Should(BeTrue())
|
||||
})
|
||||
|
||||
It("should have the correct summary", func() {
|
||||
summary := aftSuite.Summary()
|
||||
Ω(summary.ComponentType).Should(Equal(types.SpecComponentTypeAfterSuite))
|
||||
Ω(summary.CodeLocation).Should(Equal(codeLocation))
|
||||
Ω(summary.State).Should(Equal(types.SpecStatePassed))
|
||||
Ω(summary.RunTime).Should(BeNumerically(">=", 10*time.Millisecond))
|
||||
Ω(summary.Failure).Should(BeZero())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the body fails", func() {
|
||||
BeforeEach(func() {
|
||||
aftSuite = NewAfterSuiteNode(func() {
|
||||
failer.Fail("oops", innerCodeLocation)
|
||||
}, codeLocation, 0, failer)
|
||||
outcome = aftSuite.Run(0, 0, "")
|
||||
})
|
||||
|
||||
It("should return false when run and report as failed", func() {
|
||||
Ω(outcome).Should(BeFalse())
|
||||
Ω(aftSuite.Passed()).Should(BeFalse())
|
||||
})
|
||||
|
||||
It("should have the correct summary", func() {
|
||||
summary := aftSuite.Summary()
|
||||
Ω(summary.State).Should(Equal(types.SpecStateFailed))
|
||||
Ω(summary.Failure.Message).Should(Equal("oops"))
|
||||
Ω(summary.Failure.Location).Should(Equal(innerCodeLocation))
|
||||
Ω(summary.Failure.ForwardedPanic).Should(BeEmpty())
|
||||
Ω(summary.Failure.ComponentIndex).Should(Equal(0))
|
||||
Ω(summary.Failure.ComponentType).Should(Equal(types.SpecComponentTypeAfterSuite))
|
||||
Ω(summary.Failure.ComponentCodeLocation).Should(Equal(codeLocation))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the body times out", func() {
|
||||
BeforeEach(func() {
|
||||
aftSuite = NewAfterSuiteNode(func(done Done) {
|
||||
}, codeLocation, time.Millisecond, failer)
|
||||
outcome = aftSuite.Run(0, 0, "")
|
||||
})
|
||||
|
||||
It("should return false when run and report as failed", func() {
|
||||
Ω(outcome).Should(BeFalse())
|
||||
Ω(aftSuite.Passed()).Should(BeFalse())
|
||||
})
|
||||
|
||||
It("should have the correct summary", func() {
|
||||
summary := aftSuite.Summary()
|
||||
Ω(summary.State).Should(Equal(types.SpecStateTimedOut))
|
||||
Ω(summary.Failure.ForwardedPanic).Should(BeEmpty())
|
||||
Ω(summary.Failure.ComponentIndex).Should(Equal(0))
|
||||
Ω(summary.Failure.ComponentType).Should(Equal(types.SpecComponentTypeAfterSuite))
|
||||
Ω(summary.Failure.ComponentCodeLocation).Should(Equal(codeLocation))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the body panics", func() {
|
||||
BeforeEach(func() {
|
||||
aftSuite = NewAfterSuiteNode(func() {
|
||||
panic("bam")
|
||||
}, codeLocation, 0, failer)
|
||||
outcome = aftSuite.Run(0, 0, "")
|
||||
})
|
||||
|
||||
It("should return false when run and report as failed", func() {
|
||||
Ω(outcome).Should(BeFalse())
|
||||
Ω(aftSuite.Passed()).Should(BeFalse())
|
||||
})
|
||||
|
||||
It("should have the correct summary", func() {
|
||||
summary := aftSuite.Summary()
|
||||
Ω(summary.State).Should(Equal(types.SpecStatePanicked))
|
||||
Ω(summary.Failure.ForwardedPanic).Should(Equal("bam"))
|
||||
Ω(summary.Failure.ComponentIndex).Should(Equal(0))
|
||||
Ω(summary.Failure.ComponentType).Should(Equal(types.SpecComponentTypeAfterSuite))
|
||||
Ω(summary.Failure.ComponentCodeLocation).Should(Equal(codeLocation))
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
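For orientation, the BeforeSuite/AfterSuite nodes tested above are what the public BeforeSuite and AfterSuite blocks map onto. A minimal sketch of a suite wiring them up, assuming the standard ginkgo/gomega DSL (suite name and setup work are illustrative):

package mypkg_test // illustrative package name

import (
	"testing"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
)

func TestMyPkg(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "MyPkg Suite")
}

var _ = BeforeSuite(func() {
	// one-time setup; a failure here is reported against the BeforeSuite component
	Ω(true).Should(BeTrue())
})

var _ = AfterSuite(func() {
	// one-time teardown; runs whether or not the specs passed
})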
@@ -1,196 +0,0 @@
|
||||
package leafnodes_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/ginkgo/internal/leafnodes"
|
||||
"github.com/onsi/ginkgo/types"
|
||||
. "github.com/onsi/gomega"
|
||||
"sync"
|
||||
|
||||
"github.com/onsi/gomega/ghttp"
|
||||
"net/http"
|
||||
|
||||
"github.com/onsi/ginkgo/internal/codelocation"
|
||||
Failer "github.com/onsi/ginkgo/internal/failer"
|
||||
"time"
|
||||
)
|
||||
|
||||
var _ = Describe("SynchronizedAfterSuiteNode", func() {
|
||||
var failer *Failer.Failer
|
||||
var node SuiteNode
|
||||
var codeLocation types.CodeLocation
|
||||
var innerCodeLocation types.CodeLocation
|
||||
var outcome bool
|
||||
var server *ghttp.Server
|
||||
var things []string
|
||||
var lock *sync.Mutex
|
||||
|
||||
BeforeEach(func() {
|
||||
things = []string{}
|
||||
server = ghttp.NewServer()
|
||||
codeLocation = codelocation.New(0)
|
||||
innerCodeLocation = codelocation.New(0)
|
||||
failer = Failer.New()
|
||||
lock = &sync.Mutex{}
|
||||
})
|
||||
|
||||
AfterEach(func() {
|
||||
server.Close()
|
||||
})
|
||||
|
||||
newNode := func(bodyA interface{}, bodyB interface{}) SuiteNode {
|
||||
return NewSynchronizedAfterSuiteNode(bodyA, bodyB, codeLocation, time.Millisecond, failer)
|
||||
}
|
||||
|
||||
ranThing := func(thing string) {
|
||||
lock.Lock()
|
||||
defer lock.Unlock()
|
||||
things = append(things, thing)
|
||||
}
|
||||
|
||||
thingsThatRan := func() []string {
|
||||
lock.Lock()
|
||||
defer lock.Unlock()
|
||||
return things
|
||||
}
|
||||
|
||||
Context("when not running in parallel", func() {
|
||||
Context("when all is well", func() {
|
||||
BeforeEach(func() {
|
||||
node = newNode(func() {
|
||||
ranThing("A")
|
||||
}, func() {
|
||||
ranThing("B")
|
||||
})
|
||||
|
||||
outcome = node.Run(1, 1, server.URL())
|
||||
})
|
||||
|
||||
It("should run A, then B", func() {
|
||||
Ω(thingsThatRan()).Should(Equal([]string{"A", "B"}))
|
||||
})
|
||||
|
||||
It("should report success", func() {
|
||||
Ω(outcome).Should(BeTrue())
|
||||
Ω(node.Passed()).Should(BeTrue())
|
||||
Ω(node.Summary().State).Should(Equal(types.SpecStatePassed))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when A fails", func() {
|
||||
BeforeEach(func() {
|
||||
node = newNode(func() {
|
||||
ranThing("A")
|
||||
failer.Fail("bam", innerCodeLocation)
|
||||
}, func() {
|
||||
ranThing("B")
|
||||
})
|
||||
|
||||
outcome = node.Run(1, 1, server.URL())
|
||||
})
|
||||
|
||||
It("should still run B", func() {
|
||||
Ω(thingsThatRan()).Should(Equal([]string{"A", "B"}))
|
||||
})
|
||||
|
||||
It("should report failure", func() {
|
||||
Ω(outcome).Should(BeFalse())
|
||||
Ω(node.Passed()).Should(BeFalse())
|
||||
Ω(node.Summary().State).Should(Equal(types.SpecStateFailed))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when B fails", func() {
|
||||
BeforeEach(func() {
|
||||
node = newNode(func() {
|
||||
ranThing("A")
|
||||
}, func() {
|
||||
ranThing("B")
|
||||
failer.Fail("bam", innerCodeLocation)
|
||||
})
|
||||
|
||||
outcome = node.Run(1, 1, server.URL())
|
||||
})
|
||||
|
||||
It("should run all the things", func() {
|
||||
Ω(thingsThatRan()).Should(Equal([]string{"A", "B"}))
|
||||
})
|
||||
|
||||
It("should report failure", func() {
|
||||
Ω(outcome).Should(BeFalse())
|
||||
Ω(node.Passed()).Should(BeFalse())
|
||||
Ω(node.Summary().State).Should(Equal(types.SpecStateFailed))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Context("when running in parallel", func() {
|
||||
Context("as the first node", func() {
|
||||
BeforeEach(func() {
|
||||
server.AppendHandlers(ghttp.CombineHandlers(
|
||||
ghttp.VerifyRequest("GET", "/RemoteAfterSuiteData"),
|
||||
func(writer http.ResponseWriter, request *http.Request) {
|
||||
ranThing("Request1")
|
||||
},
|
||||
ghttp.RespondWithJSONEncoded(200, types.RemoteAfterSuiteData{false}),
|
||||
), ghttp.CombineHandlers(
|
||||
ghttp.VerifyRequest("GET", "/RemoteAfterSuiteData"),
|
||||
func(writer http.ResponseWriter, request *http.Request) {
|
||||
ranThing("Request2")
|
||||
},
|
||||
ghttp.RespondWithJSONEncoded(200, types.RemoteAfterSuiteData{false}),
|
||||
), ghttp.CombineHandlers(
|
||||
ghttp.VerifyRequest("GET", "/RemoteAfterSuiteData"),
|
||||
func(writer http.ResponseWriter, request *http.Request) {
|
||||
ranThing("Request3")
|
||||
},
|
||||
ghttp.RespondWithJSONEncoded(200, types.RemoteAfterSuiteData{true}),
|
||||
))
|
||||
|
||||
node = newNode(func() {
|
||||
ranThing("A")
|
||||
}, func() {
|
||||
ranThing("B")
|
||||
})
|
||||
|
||||
outcome = node.Run(1, 3, server.URL())
|
||||
})
|
||||
|
||||
It("should run A and, when the server says its time, run B", func() {
|
||||
Ω(thingsThatRan()).Should(Equal([]string{"A", "Request1", "Request2", "Request3", "B"}))
|
||||
})
|
||||
|
||||
It("should report success", func() {
|
||||
Ω(outcome).Should(BeTrue())
|
||||
Ω(node.Passed()).Should(BeTrue())
|
||||
Ω(node.Summary().State).Should(Equal(types.SpecStatePassed))
|
||||
})
|
||||
})
|
||||
|
||||
Context("as any other node", func() {
|
||||
BeforeEach(func() {
|
||||
node = newNode(func() {
|
||||
ranThing("A")
|
||||
}, func() {
|
||||
ranThing("B")
|
||||
})
|
||||
|
||||
outcome = node.Run(2, 3, server.URL())
|
||||
})
|
||||
|
||||
It("should run A, and not run B", func() {
|
||||
Ω(thingsThatRan()).Should(Equal([]string{"A"}))
|
||||
})
|
||||
|
||||
It("should not talk to the server", func() {
|
||||
Ω(server.ReceivedRequests()).Should(BeEmpty())
|
||||
})
|
||||
|
||||
It("should report success", func() {
|
||||
Ω(outcome).Should(BeTrue())
|
||||
Ω(node.Passed()).Should(BeTrue())
|
||||
Ω(node.Summary().State).Should(Equal(types.SpecStatePassed))
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,445 +0,0 @@
|
||||
package leafnodes_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/ginkgo/internal/leafnodes"
|
||||
. "github.com/onsi/gomega"
|
||||
|
||||
"github.com/onsi/gomega/ghttp"
|
||||
"net/http"
|
||||
|
||||
"github.com/onsi/ginkgo/internal/codelocation"
|
||||
Failer "github.com/onsi/ginkgo/internal/failer"
|
||||
"github.com/onsi/ginkgo/types"
|
||||
"time"
|
||||
)
|
||||
|
||||
var _ = Describe("SynchronizedBeforeSuiteNode", func() {
|
||||
var failer *Failer.Failer
|
||||
var node SuiteNode
|
||||
var codeLocation types.CodeLocation
|
||||
var innerCodeLocation types.CodeLocation
|
||||
var outcome bool
|
||||
var server *ghttp.Server
|
||||
|
||||
BeforeEach(func() {
|
||||
server = ghttp.NewServer()
|
||||
codeLocation = codelocation.New(0)
|
||||
innerCodeLocation = codelocation.New(0)
|
||||
failer = Failer.New()
|
||||
})
|
||||
|
||||
AfterEach(func() {
|
||||
server.Close()
|
||||
})
|
||||
|
||||
newNode := func(bodyA interface{}, bodyB interface{}) SuiteNode {
|
||||
return NewSynchronizedBeforeSuiteNode(bodyA, bodyB, codeLocation, time.Millisecond, failer)
|
||||
}
|
||||
|
||||
Describe("when not running in parallel", func() {
|
||||
Context("when all is well", func() {
|
||||
var data []byte
|
||||
BeforeEach(func() {
|
||||
data = nil
|
||||
|
||||
node = newNode(func() []byte {
|
||||
return []byte("my data")
|
||||
}, func(d []byte) {
|
||||
data = d
|
||||
})
|
||||
|
||||
outcome = node.Run(1, 1, server.URL())
|
||||
})
|
||||
|
||||
It("should run A, then B passing the output from A to B", func() {
|
||||
Ω(data).Should(Equal([]byte("my data")))
|
||||
})
|
||||
|
||||
It("should report success", func() {
|
||||
Ω(outcome).Should(BeTrue())
|
||||
Ω(node.Passed()).Should(BeTrue())
|
||||
Ω(node.Summary().State).Should(Equal(types.SpecStatePassed))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when A fails", func() {
|
||||
var ranB bool
|
||||
BeforeEach(func() {
|
||||
ranB = false
|
||||
node = newNode(func() []byte {
|
||||
failer.Fail("boom", innerCodeLocation)
|
||||
return nil
|
||||
}, func([]byte) {
|
||||
ranB = true
|
||||
})
|
||||
|
||||
outcome = node.Run(1, 1, server.URL())
|
||||
})
|
||||
|
||||
It("should not run B", func() {
|
||||
Ω(ranB).Should(BeFalse())
|
||||
})
|
||||
|
||||
It("should report failure", func() {
|
||||
Ω(outcome).Should(BeFalse())
|
||||
Ω(node.Passed()).Should(BeFalse())
|
||||
Ω(node.Summary().State).Should(Equal(types.SpecStateFailed))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when B fails", func() {
|
||||
BeforeEach(func() {
|
||||
node = newNode(func() []byte {
|
||||
return nil
|
||||
}, func([]byte) {
|
||||
failer.Fail("boom", innerCodeLocation)
|
||||
})
|
||||
|
||||
outcome = node.Run(1, 1, server.URL())
|
||||
})
|
||||
|
||||
It("should report failure", func() {
|
||||
Ω(outcome).Should(BeFalse())
|
||||
Ω(node.Passed()).Should(BeFalse())
|
||||
Ω(node.Summary().State).Should(Equal(types.SpecStateFailed))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when A times out", func() {
|
||||
var ranB bool
|
||||
BeforeEach(func() {
|
||||
ranB = false
|
||||
node = newNode(func(Done) []byte {
|
||||
time.Sleep(time.Second)
|
||||
return nil
|
||||
}, func([]byte) {
|
||||
ranB = true
|
||||
})
|
||||
|
||||
outcome = node.Run(1, 1, server.URL())
|
||||
})
|
||||
|
||||
It("should not run B", func() {
|
||||
Ω(ranB).Should(BeFalse())
|
||||
})
|
||||
|
||||
It("should report failure", func() {
|
||||
Ω(outcome).Should(BeFalse())
|
||||
Ω(node.Passed()).Should(BeFalse())
|
||||
Ω(node.Summary().State).Should(Equal(types.SpecStateTimedOut))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when B times out", func() {
|
||||
BeforeEach(func() {
|
||||
node = newNode(func() []byte {
|
||||
return nil
|
||||
}, func([]byte, Done) {
|
||||
time.Sleep(time.Second)
|
||||
})
|
||||
|
||||
outcome = node.Run(1, 1, server.URL())
|
||||
})
|
||||
|
||||
It("should report failure", func() {
|
||||
Ω(outcome).Should(BeFalse())
|
||||
Ω(node.Passed()).Should(BeFalse())
|
||||
Ω(node.Summary().State).Should(Equal(types.SpecStateTimedOut))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("when running in parallel", func() {
|
||||
var ranB bool
|
||||
var parallelNode, parallelTotal int
|
||||
BeforeEach(func() {
|
||||
ranB = false
|
||||
parallelNode, parallelTotal = 1, 3
|
||||
})
|
||||
|
||||
Context("as the first node, it runs A", func() {
|
||||
var expectedState types.RemoteBeforeSuiteData
|
||||
|
||||
BeforeEach(func() {
|
||||
parallelNode, parallelTotal = 1, 3
|
||||
})
|
||||
|
||||
JustBeforeEach(func() {
|
||||
server.AppendHandlers(ghttp.CombineHandlers(
|
||||
ghttp.VerifyRequest("POST", "/BeforeSuiteState"),
|
||||
ghttp.VerifyJSONRepresenting(expectedState),
|
||||
))
|
||||
|
||||
outcome = node.Run(parallelNode, parallelTotal, server.URL())
|
||||
})
|
||||
|
||||
Context("when A succeeds", func() {
|
||||
BeforeEach(func() {
|
||||
expectedState = types.RemoteBeforeSuiteData{[]byte("my data"), types.RemoteBeforeSuiteStatePassed}
|
||||
|
||||
node = newNode(func() []byte {
|
||||
return []byte("my data")
|
||||
}, func([]byte) {
|
||||
ranB = true
|
||||
})
|
||||
})
|
||||
|
||||
It("should post about A succeeding", func() {
|
||||
Ω(server.ReceivedRequests()).Should(HaveLen(1))
|
||||
})
|
||||
|
||||
It("should run B", func() {
|
||||
Ω(ranB).Should(BeTrue())
|
||||
})
|
||||
|
||||
It("should report success", func() {
|
||||
Ω(outcome).Should(BeTrue())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when A fails", func() {
|
||||
BeforeEach(func() {
|
||||
expectedState = types.RemoteBeforeSuiteData{nil, types.RemoteBeforeSuiteStateFailed}
|
||||
|
||||
node = newNode(func() []byte {
|
||||
panic("BAM")
|
||||
return []byte("my data")
|
||||
}, func([]byte) {
|
||||
ranB = true
|
||||
})
|
||||
})
|
||||
|
||||
It("should post about A failing", func() {
|
||||
Ω(server.ReceivedRequests()).Should(HaveLen(1))
|
||||
})
|
||||
|
||||
It("should not run B", func() {
|
||||
Ω(ranB).Should(BeFalse())
|
||||
})
|
||||
|
||||
It("should report failure", func() {
|
||||
Ω(outcome).Should(BeFalse())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Context("as the Nth node", func() {
|
||||
var statusCode int
|
||||
var response interface{}
|
||||
var ranA bool
|
||||
var bData []byte
|
||||
|
||||
BeforeEach(func() {
|
||||
ranA = false
|
||||
bData = nil
|
||||
|
||||
statusCode = http.StatusOK
|
||||
|
||||
server.AppendHandlers(ghttp.CombineHandlers(
|
||||
ghttp.VerifyRequest("GET", "/BeforeSuiteState"),
|
||||
ghttp.RespondWith(http.StatusOK, string((types.RemoteBeforeSuiteData{nil, types.RemoteBeforeSuiteStatePending}).ToJSON())),
|
||||
), ghttp.CombineHandlers(
|
||||
ghttp.VerifyRequest("GET", "/BeforeSuiteState"),
|
||||
ghttp.RespondWith(http.StatusOK, string((types.RemoteBeforeSuiteData{nil, types.RemoteBeforeSuiteStatePending}).ToJSON())),
|
||||
), ghttp.CombineHandlers(
|
||||
ghttp.VerifyRequest("GET", "/BeforeSuiteState"),
|
||||
ghttp.RespondWithJSONEncodedPtr(&statusCode, &response),
|
||||
))
|
||||
|
||||
node = newNode(func() []byte {
|
||||
ranA = true
|
||||
return nil
|
||||
}, func(data []byte) {
|
||||
bData = data
|
||||
})
|
||||
|
||||
parallelNode, parallelTotal = 2, 3
|
||||
})
|
||||
|
||||
Context("when A on node1 succeeds", func() {
|
||||
BeforeEach(func() {
|
||||
response = types.RemoteBeforeSuiteData{[]byte("my data"), types.RemoteBeforeSuiteStatePassed}
|
||||
outcome = node.Run(parallelNode, parallelTotal, server.URL())
|
||||
})
|
||||
|
||||
It("should not run A", func() {
|
||||
Ω(ranA).Should(BeFalse())
|
||||
})
|
||||
|
||||
It("should poll for A", func() {
|
||||
Ω(server.ReceivedRequests()).Should(HaveLen(3))
|
||||
})
|
||||
|
||||
It("should run B when the polling succeeds", func() {
|
||||
Ω(bData).Should(Equal([]byte("my data")))
|
||||
})
|
||||
|
||||
It("should succeed", func() {
|
||||
Ω(outcome).Should(BeTrue())
|
||||
Ω(node.Passed()).Should(BeTrue())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when A on node1 fails", func() {
|
||||
BeforeEach(func() {
|
||||
response = types.RemoteBeforeSuiteData{[]byte("my data"), types.RemoteBeforeSuiteStateFailed}
|
||||
outcome = node.Run(parallelNode, parallelTotal, server.URL())
|
||||
})
|
||||
|
||||
It("should not run A", func() {
|
||||
Ω(ranA).Should(BeFalse())
|
||||
})
|
||||
|
||||
It("should poll for A", func() {
|
||||
Ω(server.ReceivedRequests()).Should(HaveLen(3))
|
||||
})
|
||||
|
||||
It("should not run B", func() {
|
||||
Ω(bData).Should(BeNil())
|
||||
})
|
||||
|
||||
It("should fail", func() {
|
||||
Ω(outcome).Should(BeFalse())
|
||||
Ω(node.Passed()).Should(BeFalse())
|
||||
|
||||
summary := node.Summary()
|
||||
Ω(summary.State).Should(Equal(types.SpecStateFailed))
|
||||
Ω(summary.Failure.Message).Should(Equal("BeforeSuite on Node 1 failed"))
|
||||
Ω(summary.Failure.Location).Should(Equal(codeLocation))
|
||||
Ω(summary.Failure.ComponentType).Should(Equal(types.SpecComponentTypeBeforeSuite))
|
||||
Ω(summary.Failure.ComponentIndex).Should(Equal(0))
|
||||
Ω(summary.Failure.ComponentCodeLocation).Should(Equal(codeLocation))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when node1 disappears", func() {
|
||||
BeforeEach(func() {
|
||||
response = types.RemoteBeforeSuiteData{[]byte("my data"), types.RemoteBeforeSuiteStateDisappeared}
|
||||
outcome = node.Run(parallelNode, parallelTotal, server.URL())
|
||||
})
|
||||
|
||||
It("should not run A", func() {
|
||||
Ω(ranA).Should(BeFalse())
|
||||
})
|
||||
|
||||
It("should poll for A", func() {
|
||||
Ω(server.ReceivedRequests()).Should(HaveLen(3))
|
||||
})
|
||||
|
||||
It("should not run B", func() {
|
||||
Ω(bData).Should(BeNil())
|
||||
})
|
||||
|
||||
It("should fail", func() {
|
||||
Ω(outcome).Should(BeFalse())
|
||||
Ω(node.Passed()).Should(BeFalse())
|
||||
|
||||
summary := node.Summary()
|
||||
Ω(summary.State).Should(Equal(types.SpecStateFailed))
|
||||
Ω(summary.Failure.Message).Should(Equal("Node 1 disappeared before completing BeforeSuite"))
|
||||
Ω(summary.Failure.Location).Should(Equal(codeLocation))
|
||||
Ω(summary.Failure.ComponentType).Should(Equal(types.SpecComponentTypeBeforeSuite))
|
||||
Ω(summary.Failure.ComponentIndex).Should(Equal(0))
|
||||
Ω(summary.Failure.ComponentCodeLocation).Should(Equal(codeLocation))
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("construction", func() {
|
||||
Describe("the first function", func() {
|
||||
Context("when the first function returns a byte array", func() {
|
||||
Context("and takes nothing", func() {
|
||||
It("should be fine", func() {
|
||||
Ω(func() {
|
||||
newNode(func() []byte { return nil }, func([]byte) {})
|
||||
}).ShouldNot(Panic())
|
||||
})
|
||||
})
|
||||
|
||||
Context("and takes a done function", func() {
|
||||
It("should be fine", func() {
|
||||
Ω(func() {
|
||||
newNode(func(Done) []byte { return nil }, func([]byte) {})
|
||||
}).ShouldNot(Panic())
|
||||
})
|
||||
})
|
||||
|
||||
Context("and takes more than one thing", func() {
|
||||
It("should panic", func() {
|
||||
Ω(func() {
|
||||
newNode(func(Done, Done) []byte { return nil }, func([]byte) {})
|
||||
}).Should(Panic())
|
||||
})
|
||||
})
|
||||
|
||||
Context("and takes something else", func() {
|
||||
It("should panic", func() {
|
||||
Ω(func() {
|
||||
newNode(func(bool) []byte { return nil }, func([]byte) {})
|
||||
}).Should(Panic())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the first function does not return a byte array", func() {
|
||||
It("should panic", func() {
|
||||
Ω(func() {
|
||||
newNode(func() {}, func([]byte) {})
|
||||
}).Should(Panic())
|
||||
|
||||
Ω(func() {
|
||||
newNode(func() []int { return nil }, func([]byte) {})
|
||||
}).Should(Panic())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("the second function", func() {
|
||||
Context("when the second function takes a byte array", func() {
|
||||
It("should be fine", func() {
|
||||
Ω(func() {
|
||||
newNode(func() []byte { return nil }, func([]byte) {})
|
||||
}).ShouldNot(Panic())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when it also takes a done channel", func() {
|
||||
It("should be fine", func() {
|
||||
Ω(func() {
|
||||
newNode(func() []byte { return nil }, func([]byte, Done) {})
|
||||
}).ShouldNot(Panic())
|
||||
})
|
||||
})
|
||||
|
||||
Context("if it takes anything else", func() {
|
||||
It("should panic", func() {
|
||||
Ω(func() {
|
||||
newNode(func() []byte { return nil }, func([]byte, chan bool) {})
|
||||
}).Should(Panic())
|
||||
|
||||
Ω(func() {
|
||||
newNode(func() []byte { return nil }, func(string) {})
|
||||
}).Should(Panic())
|
||||
})
|
||||
})
|
||||
|
||||
Context("if it takes nothing at all", func() {
|
||||
It("should panic", func() {
|
||||
Ω(func() {
|
||||
newNode(func() []byte { return nil }, func() {})
|
||||
}).Should(Panic())
|
||||
})
|
||||
})
|
||||
|
||||
Context("if it returns something", func() {
|
||||
It("should panic", func() {
|
||||
Ω(func() {
|
||||
newNode(func() []byte { return nil }, func([]byte) []byte { return nil })
|
||||
}).Should(Panic())
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
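The synchronized suite nodes above back Ginkgo's parallel-aware DSL: node 1 runs the first function, its returned bytes are handed to every node's second function, and the after-suite counterpart runs its second function on node 1 only once all other nodes report done. A sketch of the public usage, assuming the standard ginkgo DSL (the payload is illustrative):

package mypkg_test // illustrative package name

import (
	. "github.com/onsi/ginkgo"
)

var sharedConfig string

var _ = SynchronizedBeforeSuite(func() []byte {
	// runs on parallel node 1 only; the returned bytes are forwarded to all nodes
	return []byte("shared-setup-result") // illustrative payload
}, func(data []byte) {
	// runs on every node once node 1 has finished
	sharedConfig = string(data)
})

var _ = SynchronizedAfterSuite(func() {
	// runs on every node when its specs finish
}, func() {
	// runs on node 1 only, after all other nodes are done
})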
311
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/aggregator_test.go
generated
vendored
@@ -1,311 +0,0 @@
|
||||
package remote_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
|
||||
"github.com/onsi/ginkgo/config"
|
||||
. "github.com/onsi/ginkgo/internal/remote"
|
||||
st "github.com/onsi/ginkgo/reporters/stenographer"
|
||||
"github.com/onsi/ginkgo/types"
|
||||
"time"
|
||||
)
|
||||
|
||||
var _ = Describe("Aggregator", func() {
|
||||
var (
|
||||
aggregator *Aggregator
|
||||
reporterConfig config.DefaultReporterConfigType
|
||||
stenographer *st.FakeStenographer
|
||||
result chan bool
|
||||
|
||||
ginkgoConfig1 config.GinkgoConfigType
|
||||
ginkgoConfig2 config.GinkgoConfigType
|
||||
|
||||
suiteSummary1 *types.SuiteSummary
|
||||
suiteSummary2 *types.SuiteSummary
|
||||
|
||||
beforeSummary *types.SetupSummary
|
||||
afterSummary *types.SetupSummary
|
||||
specSummary *types.SpecSummary
|
||||
|
||||
suiteDescription string
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
reporterConfig = config.DefaultReporterConfigType{
|
||||
NoColor: false,
|
||||
SlowSpecThreshold: 0.1,
|
||||
NoisyPendings: true,
|
||||
Succinct: false,
|
||||
Verbose: true,
|
||||
}
|
||||
stenographer = st.NewFakeStenographer()
|
||||
result = make(chan bool, 1)
|
||||
aggregator = NewAggregator(2, result, reporterConfig, stenographer)
|
||||
|
||||
//
|
||||
// now set up some fixture data
|
||||
//
|
||||
|
||||
ginkgoConfig1 = config.GinkgoConfigType{
|
||||
RandomSeed: 1138,
|
||||
RandomizeAllSpecs: true,
|
||||
ParallelNode: 1,
|
||||
ParallelTotal: 2,
|
||||
}
|
||||
|
||||
ginkgoConfig2 = config.GinkgoConfigType{
|
||||
RandomSeed: 1138,
|
||||
RandomizeAllSpecs: true,
|
||||
ParallelNode: 2,
|
||||
ParallelTotal: 2,
|
||||
}
|
||||
|
||||
suiteDescription = "My Parallel Suite"
|
||||
|
||||
suiteSummary1 = &types.SuiteSummary{
|
||||
SuiteDescription: suiteDescription,
|
||||
|
||||
NumberOfSpecsBeforeParallelization: 30,
|
||||
NumberOfTotalSpecs: 17,
|
||||
NumberOfSpecsThatWillBeRun: 15,
|
||||
NumberOfPendingSpecs: 1,
|
||||
NumberOfSkippedSpecs: 1,
|
||||
}
|
||||
|
||||
suiteSummary2 = &types.SuiteSummary{
|
||||
SuiteDescription: suiteDescription,
|
||||
|
||||
NumberOfSpecsBeforeParallelization: 30,
|
||||
NumberOfTotalSpecs: 13,
|
||||
NumberOfSpecsThatWillBeRun: 8,
|
||||
NumberOfPendingSpecs: 2,
|
||||
NumberOfSkippedSpecs: 3,
|
||||
}
|
||||
|
||||
beforeSummary = &types.SetupSummary{
|
||||
State: types.SpecStatePassed,
|
||||
CapturedOutput: "BeforeSuiteOutput",
|
||||
}
|
||||
|
||||
afterSummary = &types.SetupSummary{
|
||||
State: types.SpecStatePassed,
|
||||
CapturedOutput: "AfterSuiteOutput",
|
||||
}
|
||||
|
||||
specSummary = &types.SpecSummary{
|
||||
State: types.SpecStatePassed,
|
||||
CapturedOutput: "SpecOutput",
|
||||
}
|
||||
})
|
||||
|
||||
call := func(method string, args ...interface{}) st.FakeStenographerCall {
|
||||
return st.NewFakeStenographerCall(method, args...)
|
||||
}
|
||||
|
||||
beginSuite := func() {
|
||||
stenographer.Reset()
|
||||
aggregator.SpecSuiteWillBegin(ginkgoConfig2, suiteSummary2)
|
||||
aggregator.SpecSuiteWillBegin(ginkgoConfig1, suiteSummary1)
|
||||
Eventually(func() interface{} {
|
||||
return len(stenographer.Calls())
|
||||
}).Should(BeNumerically(">=", 3))
|
||||
}
|
||||
|
||||
Describe("Announcing the beginning of the suite", func() {
|
||||
Context("When one of the parallel-suites starts", func() {
|
||||
BeforeEach(func() {
|
||||
aggregator.SpecSuiteWillBegin(ginkgoConfig2, suiteSummary2)
|
||||
})
|
||||
|
||||
It("should be silent", func() {
|
||||
Consistently(func() interface{} { return stenographer.Calls() }).Should(BeEmpty())
|
||||
})
|
||||
})
|
||||
|
||||
Context("once all of the parallel-suites have started", func() {
|
||||
BeforeEach(func() {
|
||||
aggregator.SpecSuiteWillBegin(ginkgoConfig2, suiteSummary2)
|
||||
aggregator.SpecSuiteWillBegin(ginkgoConfig1, suiteSummary1)
|
||||
Eventually(func() interface{} {
|
||||
return stenographer.Calls()
|
||||
}).Should(HaveLen(3))
|
||||
})
|
||||
|
||||
It("should announce the beginning of the suite", func() {
|
||||
Ω(stenographer.Calls()).Should(HaveLen(3))
|
||||
Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSuite", suiteDescription, ginkgoConfig1.RandomSeed, true, false)))
|
||||
Ω(stenographer.Calls()[1]).Should(Equal(call("AnnounceNumberOfSpecs", 23, 30, false)))
|
||||
Ω(stenographer.Calls()[2]).Should(Equal(call("AnnounceAggregatedParallelRun", 2, false)))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Announcing specs and before suites", func() {
|
||||
Context("when the parallel-suites have not all started", func() {
|
||||
BeforeEach(func() {
|
||||
aggregator.BeforeSuiteDidRun(beforeSummary)
|
||||
aggregator.AfterSuiteDidRun(afterSummary)
|
||||
aggregator.SpecDidComplete(specSummary)
|
||||
})
|
||||
|
||||
It("should not announce any specs", func() {
|
||||
Consistently(func() interface{} { return stenographer.Calls() }).Should(BeEmpty())
|
||||
})
|
||||
|
||||
Context("when the parallel-suites subsequently start", func() {
|
||||
BeforeEach(func() {
|
||||
beginSuite()
|
||||
})
|
||||
|
||||
It("should announce the specs, the before suites and the after suites", func() {
|
||||
Eventually(func() interface{} {
|
||||
return stenographer.Calls()
|
||||
}).Should(ContainElement(call("AnnounceSuccesfulSpec", specSummary)))
|
||||
|
||||
Ω(stenographer.Calls()).Should(ContainElement(call("AnnounceCapturedOutput", beforeSummary.CapturedOutput)))
|
||||
Ω(stenographer.Calls()).Should(ContainElement(call("AnnounceCapturedOutput", afterSummary.CapturedOutput)))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Context("When the parallel-suites have all started", func() {
|
||||
BeforeEach(func() {
|
||||
beginSuite()
|
||||
stenographer.Reset()
|
||||
})
|
||||
|
||||
Context("When a spec completes", func() {
|
||||
BeforeEach(func() {
|
||||
aggregator.BeforeSuiteDidRun(beforeSummary)
|
||||
aggregator.SpecDidComplete(specSummary)
|
||||
aggregator.AfterSuiteDidRun(afterSummary)
|
||||
Eventually(func() interface{} {
|
||||
return stenographer.Calls()
|
||||
}).Should(HaveLen(5))
|
||||
})
|
||||
|
||||
It("should announce the captured output of the BeforeSuite", func() {
|
||||
Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceCapturedOutput", beforeSummary.CapturedOutput)))
|
||||
})
|
||||
|
||||
It("should announce that the spec will run (when in verbose mode)", func() {
|
||||
Ω(stenographer.Calls()[1]).Should(Equal(call("AnnounceSpecWillRun", specSummary)))
|
||||
})
|
||||
|
||||
It("should announce the captured stdout of the spec", func() {
|
||||
Ω(stenographer.Calls()[2]).Should(Equal(call("AnnounceCapturedOutput", specSummary.CapturedOutput)))
|
||||
})
|
||||
|
||||
It("should announce completion", func() {
|
||||
Ω(stenographer.Calls()[3]).Should(Equal(call("AnnounceSuccesfulSpec", specSummary)))
|
||||
})
|
||||
|
||||
It("should announce the captured output of the AfterSuite", func() {
|
||||
Ω(stenographer.Calls()[4]).Should(Equal(call("AnnounceCapturedOutput", afterSummary.CapturedOutput)))
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Announcing the end of the suite", func() {
|
||||
BeforeEach(func() {
|
||||
beginSuite()
|
||||
stenographer.Reset()
|
||||
})
|
||||
|
||||
Context("When one of the parallel-suites ends", func() {
|
||||
BeforeEach(func() {
|
||||
aggregator.SpecSuiteDidEnd(suiteSummary2)
|
||||
})
|
||||
|
||||
It("should be silent", func() {
|
||||
Consistently(func() interface{} { return stenographer.Calls() }).Should(BeEmpty())
|
||||
})
|
||||
|
||||
It("should not notify the channel", func() {
|
||||
Ω(result).Should(BeEmpty())
|
||||
})
|
||||
})
|
||||
|
||||
Context("once all of the parallel-suites end", func() {
|
||||
BeforeEach(func() {
|
||||
time.Sleep(200 * time.Millisecond)
|
||||
|
||||
suiteSummary1.SuiteSucceeded = true
|
||||
suiteSummary1.NumberOfPassedSpecs = 15
|
||||
suiteSummary1.NumberOfFailedSpecs = 0
|
||||
suiteSummary2.SuiteSucceeded = false
|
||||
suiteSummary2.NumberOfPassedSpecs = 5
|
||||
suiteSummary2.NumberOfFailedSpecs = 3
|
||||
|
||||
aggregator.SpecSuiteDidEnd(suiteSummary2)
|
||||
aggregator.SpecSuiteDidEnd(suiteSummary1)
|
||||
Eventually(func() interface{} {
|
||||
return stenographer.Calls()
|
||||
}).Should(HaveLen(2))
|
||||
})
|
||||
|
||||
It("should announce the end of the suite", func() {
|
||||
compositeSummary := stenographer.Calls()[1].Args[0].(*types.SuiteSummary)
|
||||
|
||||
Ω(compositeSummary.SuiteSucceeded).Should(BeFalse())
|
||||
Ω(compositeSummary.NumberOfSpecsThatWillBeRun).Should(Equal(23))
|
||||
Ω(compositeSummary.NumberOfTotalSpecs).Should(Equal(30))
|
||||
Ω(compositeSummary.NumberOfPassedSpecs).Should(Equal(20))
|
||||
Ω(compositeSummary.NumberOfFailedSpecs).Should(Equal(3))
|
||||
Ω(compositeSummary.NumberOfPendingSpecs).Should(Equal(3))
|
||||
Ω(compositeSummary.NumberOfSkippedSpecs).Should(Equal(4))
|
||||
Ω(compositeSummary.RunTime.Seconds()).Should(BeNumerically(">", 0.2))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when all the parallel-suites pass", func() {
|
||||
BeforeEach(func() {
|
||||
suiteSummary1.SuiteSucceeded = true
|
||||
suiteSummary2.SuiteSucceeded = true
|
||||
|
||||
aggregator.SpecSuiteDidEnd(suiteSummary2)
|
||||
aggregator.SpecSuiteDidEnd(suiteSummary1)
|
||||
Eventually(func() interface{} {
|
||||
return stenographer.Calls()
|
||||
}).Should(HaveLen(2))
|
||||
})
|
||||
|
||||
It("should report success", func() {
|
||||
compositeSummary := stenographer.Calls()[1].Args[0].(*types.SuiteSummary)
|
||||
|
||||
Ω(compositeSummary.SuiteSucceeded).Should(BeTrue())
|
||||
})
|
||||
|
||||
It("should notify the channel that it succeded", func(done Done) {
|
||||
Ω(<-result).Should(BeTrue())
|
||||
close(done)
|
||||
})
|
||||
})
|
||||
|
||||
Context("when one of the parallel-suites fails", func() {
|
||||
BeforeEach(func() {
|
||||
suiteSummary1.SuiteSucceeded = true
|
||||
suiteSummary2.SuiteSucceeded = false
|
||||
|
||||
aggregator.SpecSuiteDidEnd(suiteSummary2)
|
||||
aggregator.SpecSuiteDidEnd(suiteSummary1)
|
||||
Eventually(func() interface{} {
|
||||
return stenographer.Calls()
|
||||
}).Should(HaveLen(2))
|
||||
})
|
||||
|
||||
It("should report failure", func() {
|
||||
compositeSummary := stenographer.Calls()[1].Args[0].(*types.SuiteSummary)
|
||||
|
||||
Ω(compositeSummary.SuiteSucceeded).Should(BeFalse())
|
||||
})
|
||||
|
||||
It("should notify the channel that it failed", func(done Done) {
|
||||
Ω(<-result).Should(BeFalse())
|
||||
close(done)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,17 +0,0 @@
package remote_test

type fakeOutputInterceptor struct {
	DidStartInterceptingOutput bool
	DidStopInterceptingOutput  bool
	InterceptedOutput          string
}

func (interceptor *fakeOutputInterceptor) StartInterceptingOutput() error {
	interceptor.DidStartInterceptingOutput = true
	return nil
}

func (interceptor *fakeOutputInterceptor) StopInterceptingAndReturnOutput() (string, error) {
	interceptor.DidStopInterceptingOutput = true
	return interceptor.InterceptedOutput, nil
}
33
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/fake_poster_test.go
generated
vendored
@@ -1,33 +0,0 @@
package remote_test

import (
	"io"
	"io/ioutil"
	"net/http"
)

type post struct {
	url         string
	bodyType    string
	bodyContent []byte
}

type fakePoster struct {
	posts []post
}

func newFakePoster() *fakePoster {
	return &fakePoster{
		posts: make([]post, 0),
	}
}

func (poster *fakePoster) Post(url string, bodyType string, body io.Reader) (resp *http.Response, err error) {
	bodyContent, _ := ioutil.ReadAll(body)
	poster.posts = append(poster.posts, post{
		url:         url,
		bodyType:    bodyType,
		bodyContent: bodyContent,
	})
	return nil, nil
}
180
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/forwarding_reporter_test.go
generated
vendored
@@ -1,180 +0,0 @@
|
||||
package remote_test
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
. "github.com/onsi/ginkgo"
|
||||
"github.com/onsi/ginkgo/config"
|
||||
. "github.com/onsi/ginkgo/internal/remote"
|
||||
"github.com/onsi/ginkgo/types"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("ForwardingReporter", func() {
|
||||
var (
|
||||
reporter *ForwardingReporter
|
||||
interceptor *fakeOutputInterceptor
|
||||
poster *fakePoster
|
||||
suiteSummary *types.SuiteSummary
|
||||
specSummary *types.SpecSummary
|
||||
setupSummary *types.SetupSummary
|
||||
serverHost string
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
serverHost = "http://127.0.0.1:7788"
|
||||
|
||||
poster = newFakePoster()
|
||||
|
||||
interceptor = &fakeOutputInterceptor{
|
||||
InterceptedOutput: "The intercepted output!",
|
||||
}
|
||||
|
||||
reporter = NewForwardingReporter(serverHost, poster, interceptor)
|
||||
|
||||
suiteSummary = &types.SuiteSummary{
|
||||
SuiteDescription: "My Test Suite",
|
||||
}
|
||||
|
||||
setupSummary = &types.SetupSummary{
|
||||
State: types.SpecStatePassed,
|
||||
}
|
||||
|
||||
specSummary = &types.SpecSummary{
|
||||
ComponentTexts: []string{"My", "Spec"},
|
||||
State: types.SpecStatePassed,
|
||||
}
|
||||
})
|
||||
|
||||
Context("When a suite begins", func() {
|
||||
BeforeEach(func() {
|
||||
reporter.SpecSuiteWillBegin(config.GinkgoConfig, suiteSummary)
|
||||
})
|
||||
|
||||
It("should start intercepting output", func() {
|
||||
Ω(interceptor.DidStartInterceptingOutput).Should(BeTrue())
|
||||
})
|
||||
|
||||
It("should POST the SuiteSummary and Ginkgo Config to the Ginkgo server", func() {
|
||||
Ω(poster.posts).Should(HaveLen(1))
|
||||
Ω(poster.posts[0].url).Should(Equal("http://127.0.0.1:7788/SpecSuiteWillBegin"))
|
||||
Ω(poster.posts[0].bodyType).Should(Equal("application/json"))
|
||||
|
||||
var sentData struct {
|
||||
SentConfig config.GinkgoConfigType `json:"config"`
|
||||
SentSuiteSummary *types.SuiteSummary `json:"suite-summary"`
|
||||
}
|
||||
|
||||
err := json.Unmarshal(poster.posts[0].bodyContent, &sentData)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
Ω(sentData.SentConfig).Should(Equal(config.GinkgoConfig))
|
||||
Ω(sentData.SentSuiteSummary).Should(Equal(suiteSummary))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when a BeforeSuite completes", func() {
|
||||
BeforeEach(func() {
|
||||
reporter.BeforeSuiteDidRun(setupSummary)
|
||||
})
|
||||
|
||||
It("should stop, then start intercepting output", func() {
|
||||
Ω(interceptor.DidStopInterceptingOutput).Should(BeTrue())
|
||||
Ω(interceptor.DidStartInterceptingOutput).Should(BeTrue())
|
||||
})
|
||||
|
||||
It("should POST the SetupSummary to the Ginkgo server", func() {
|
||||
Ω(poster.posts).Should(HaveLen(1))
|
||||
Ω(poster.posts[0].url).Should(Equal("http://127.0.0.1:7788/BeforeSuiteDidRun"))
|
||||
Ω(poster.posts[0].bodyType).Should(Equal("application/json"))
|
||||
|
||||
var summary *types.SetupSummary
|
||||
err := json.Unmarshal(poster.posts[0].bodyContent, &summary)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
setupSummary.CapturedOutput = interceptor.InterceptedOutput
|
||||
Ω(summary).Should(Equal(setupSummary))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when an AfterSuite completes", func() {
|
||||
BeforeEach(func() {
|
||||
reporter.AfterSuiteDidRun(setupSummary)
|
||||
})
|
||||
|
||||
It("should stop, then start intercepting output", func() {
|
||||
Ω(interceptor.DidStopInterceptingOutput).Should(BeTrue())
|
||||
Ω(interceptor.DidStartInterceptingOutput).Should(BeTrue())
|
||||
})
|
||||
|
||||
It("should POST the SetupSummary to the Ginkgo server", func() {
|
||||
Ω(poster.posts).Should(HaveLen(1))
|
||||
Ω(poster.posts[0].url).Should(Equal("http://127.0.0.1:7788/AfterSuiteDidRun"))
|
||||
Ω(poster.posts[0].bodyType).Should(Equal("application/json"))
|
||||
|
||||
var summary *types.SetupSummary
|
||||
err := json.Unmarshal(poster.posts[0].bodyContent, &summary)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
setupSummary.CapturedOutput = interceptor.InterceptedOutput
|
||||
Ω(summary).Should(Equal(setupSummary))
|
||||
})
|
||||
})
|
||||
|
||||
Context("When a spec will run", func() {
|
||||
BeforeEach(func() {
|
||||
reporter.SpecWillRun(specSummary)
|
||||
})
|
||||
|
||||
It("should POST the SpecSummary to the Ginkgo server", func() {
|
||||
Ω(poster.posts).Should(HaveLen(1))
|
||||
Ω(poster.posts[0].url).Should(Equal("http://127.0.0.1:7788/SpecWillRun"))
|
||||
Ω(poster.posts[0].bodyType).Should(Equal("application/json"))
|
||||
|
||||
var summary *types.SpecSummary
|
||||
err := json.Unmarshal(poster.posts[0].bodyContent, &summary)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(summary).Should(Equal(specSummary))
|
||||
})
|
||||
|
||||
Context("When a spec completes", func() {
|
||||
BeforeEach(func() {
|
||||
specSummary.State = types.SpecStatePanicked
|
||||
reporter.SpecDidComplete(specSummary)
|
||||
})
|
||||
|
||||
It("should POST the SpecSummary to the Ginkgo server and include any intercepted output", func() {
|
||||
Ω(poster.posts).Should(HaveLen(2))
|
||||
Ω(poster.posts[1].url).Should(Equal("http://127.0.0.1:7788/SpecDidComplete"))
|
||||
Ω(poster.posts[1].bodyType).Should(Equal("application/json"))
|
||||
|
||||
var summary *types.SpecSummary
|
||||
err := json.Unmarshal(poster.posts[1].bodyContent, &summary)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
specSummary.CapturedOutput = interceptor.InterceptedOutput
|
||||
Ω(summary).Should(Equal(specSummary))
|
||||
})
|
||||
|
||||
It("should stop, then start intercepting output", func() {
|
||||
Ω(interceptor.DidStopInterceptingOutput).Should(BeTrue())
|
||||
Ω(interceptor.DidStartInterceptingOutput).Should(BeTrue())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Context("When a suite ends", func() {
|
||||
BeforeEach(func() {
|
||||
reporter.SpecSuiteDidEnd(suiteSummary)
|
||||
})
|
||||
|
||||
It("should POST the SuiteSummary to the Ginkgo server", func() {
|
||||
Ω(poster.posts).Should(HaveLen(1))
|
||||
Ω(poster.posts[0].url).Should(Equal("http://127.0.0.1:7788/SpecSuiteDidEnd"))
|
||||
Ω(poster.posts[0].bodyType).Should(Equal("application/json"))
|
||||
|
||||
var summary *types.SuiteSummary
|
||||
|
||||
err := json.Unmarshal(poster.posts[0].bodyContent, &summary)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
Ω(summary).Should(Equal(suiteSummary))
|
||||
})
|
||||
})
|
||||
})
|
||||
64
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/output_interceptor_test.go
generated
vendored
@@ -1,64 +0,0 @@
package remote_test

import (
	"fmt"
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/ginkgo/internal/remote"
	. "github.com/onsi/gomega"
	"os"
)

var _ = Describe("OutputInterceptor", func() {
	var interceptor OutputInterceptor

	BeforeEach(func() {
		interceptor = NewOutputInterceptor()
	})

	It("should capture all stdout/stderr output", func() {
		err := interceptor.StartInterceptingOutput()
		Ω(err).ShouldNot(HaveOccurred())

		fmt.Fprint(os.Stdout, "STDOUT")
		fmt.Fprint(os.Stderr, "STDERR")
		print("PRINT")

		output, err := interceptor.StopInterceptingAndReturnOutput()

		Ω(output).Should(Equal("STDOUTSTDERRPRINT"))
		Ω(err).ShouldNot(HaveOccurred())
	})

	It("should error if told to intercept output twice", func() {
		err := interceptor.StartInterceptingOutput()
		Ω(err).ShouldNot(HaveOccurred())

		print("A")

		err = interceptor.StartInterceptingOutput()
		Ω(err).Should(HaveOccurred())

		print("B")

		output, err := interceptor.StopInterceptingAndReturnOutput()

		Ω(output).Should(Equal("AB"))
		Ω(err).ShouldNot(HaveOccurred())
	})

	It("should allow multiple interception sessions", func() {
		err := interceptor.StartInterceptingOutput()
		Ω(err).ShouldNot(HaveOccurred())
		print("A")
		output, err := interceptor.StopInterceptingAndReturnOutput()
		Ω(output).Should(Equal("A"))
		Ω(err).ShouldNot(HaveOccurred())

		err = interceptor.StartInterceptingOutput()
		Ω(err).ShouldNot(HaveOccurred())
		print("B")
		output, err = interceptor.StopInterceptingAndReturnOutput()
		Ω(output).Should(Equal("B"))
		Ω(err).ShouldNot(HaveOccurred())
	})
})
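For reference, the interceptor contract exercised above is simple start/stop bracketing around writes to stdout and stderr. A minimal standalone sketch using only the constructor and methods the test above calls (the captured string and error handling are illustrative):

package main // illustrative standalone sketch

import (
	"fmt"
	"os"

	"github.com/onsi/ginkgo/internal/remote"
)

func main() {
	interceptor := remote.NewOutputInterceptor()
	if err := interceptor.StartInterceptingOutput(); err != nil {
		panic(err)
	}
	fmt.Fprint(os.Stdout, "hello") // captured rather than printed
	captured, err := interceptor.StopInterceptingAndReturnOutput()
	if err != nil {
		panic(err)
	}
	fmt.Println("captured:", captured)
}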
13
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/remote_suite_test.go
generated
vendored
@@ -1,13 +0,0 @@
package remote_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"

	"testing"
)

func TestRemote(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Remote Spec Forwarding Suite")
}
269
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/server_test.go
generated
vendored
@@ -1,269 +0,0 @@
|
||||
package remote_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/ginkgo/internal/remote"
|
||||
. "github.com/onsi/gomega"
|
||||
|
||||
"github.com/onsi/ginkgo/config"
|
||||
"github.com/onsi/ginkgo/reporters"
|
||||
"github.com/onsi/ginkgo/types"
|
||||
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
var _ = Describe("Server", func() {
|
||||
var (
|
||||
server *Server
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
var err error
|
||||
server, err = NewServer(3)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
server.Start()
|
||||
})
|
||||
|
||||
AfterEach(func() {
|
||||
server.Close()
|
||||
})
|
||||
|
||||
Describe("Streaming endpoints", func() {
|
||||
var (
|
||||
reporterA, reporterB *reporters.FakeReporter
|
||||
forwardingReporter *ForwardingReporter
|
||||
|
||||
suiteSummary *types.SuiteSummary
|
||||
setupSummary *types.SetupSummary
|
||||
specSummary *types.SpecSummary
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
reporterA = reporters.NewFakeReporter()
|
||||
reporterB = reporters.NewFakeReporter()
|
||||
|
||||
server.RegisterReporters(reporterA, reporterB)
|
||||
|
||||
forwardingReporter = NewForwardingReporter(server.Address(), &http.Client{}, &fakeOutputInterceptor{})
|
||||
|
||||
suiteSummary = &types.SuiteSummary{
|
||||
SuiteDescription: "My Test Suite",
|
||||
}
|
||||
|
||||
setupSummary = &types.SetupSummary{
|
||||
State: types.SpecStatePassed,
|
||||
}
|
||||
|
||||
specSummary = &types.SpecSummary{
|
||||
ComponentTexts: []string{"My", "Spec"},
|
||||
State: types.SpecStatePassed,
|
||||
}
|
||||
})
|
||||
|
||||
It("should make its address available", func() {
|
||||
Ω(server.Address()).Should(MatchRegexp(`http://127.0.0.1:\d{2,}`))
|
||||
})
|
||||
|
||||
Describe("/SpecSuiteWillBegin", func() {
|
||||
It("should decode and forward the Ginkgo config and suite summary", func(done Done) {
|
||||
forwardingReporter.SpecSuiteWillBegin(config.GinkgoConfig, suiteSummary)
|
||||
Ω(reporterA.Config).Should(Equal(config.GinkgoConfig))
|
||||
Ω(reporterB.Config).Should(Equal(config.GinkgoConfig))
|
||||
Ω(reporterA.BeginSummary).Should(Equal(suiteSummary))
|
||||
Ω(reporterB.BeginSummary).Should(Equal(suiteSummary))
|
||||
close(done)
|
||||
})
|
||||
})
|
||||
|
||||
Describe("/BeforeSuiteDidRun", func() {
|
||||
It("should decode and forward the setup summary", func() {
|
||||
forwardingReporter.BeforeSuiteDidRun(setupSummary)
|
||||
Ω(reporterA.BeforeSuiteSummary).Should(Equal(setupSummary))
|
||||
Ω(reporterB.BeforeSuiteSummary).Should(Equal(setupSummary))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("/AfterSuiteDidRun", func() {
|
||||
It("should decode and forward the setup summary", func() {
|
||||
forwardingReporter.AfterSuiteDidRun(setupSummary)
|
||||
Ω(reporterA.AfterSuiteSummary).Should(Equal(setupSummary))
|
||||
Ω(reporterB.AfterSuiteSummary).Should(Equal(setupSummary))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("/SpecWillRun", func() {
|
||||
It("should decode and forward the spec summary", func(done Done) {
|
||||
forwardingReporter.SpecWillRun(specSummary)
|
||||
Ω(reporterA.SpecWillRunSummaries[0]).Should(Equal(specSummary))
|
||||
Ω(reporterB.SpecWillRunSummaries[0]).Should(Equal(specSummary))
|
||||
close(done)
|
||||
})
|
||||
})
|
||||
|
||||
Describe("/SpecDidComplete", func() {
|
||||
It("should decode and forward the spec summary", func(done Done) {
|
||||
forwardingReporter.SpecDidComplete(specSummary)
|
||||
Ω(reporterA.SpecSummaries[0]).Should(Equal(specSummary))
|
||||
Ω(reporterB.SpecSummaries[0]).Should(Equal(specSummary))
|
||||
close(done)
|
||||
})
|
||||
})
|
||||
|
||||
Describe("/SpecSuiteDidEnd", func() {
|
||||
It("should decode and forward the suite summary", func(done Done) {
|
||||
forwardingReporter.SpecSuiteDidEnd(suiteSummary)
|
||||
Ω(reporterA.EndSummary).Should(Equal(suiteSummary))
|
||||
Ω(reporterB.EndSummary).Should(Equal(suiteSummary))
|
||||
close(done)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Synchronization endpoints", func() {
|
||||
Describe("GETting and POSTing BeforeSuiteState", func() {
|
||||
getBeforeSuite := func() types.RemoteBeforeSuiteData {
|
||||
resp, err := http.Get(server.Address() + "/BeforeSuiteState")
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(resp.StatusCode).Should(Equal(http.StatusOK))
|
||||
|
||||
r := types.RemoteBeforeSuiteData{}
|
||||
decoder := json.NewDecoder(resp.Body)
|
||||
err = decoder.Decode(&r)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
return r
|
||||
}
|
||||
|
||||
postBeforeSuite := func(r types.RemoteBeforeSuiteData) {
|
||||
resp, err := http.Post(server.Address()+"/BeforeSuiteState", "application/json", bytes.NewReader(r.ToJSON()))
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(resp.StatusCode).Should(Equal(http.StatusOK))
|
||||
}
|
||||
|
||||
Context("when the first node's Alive has not been registered yet", func() {
|
||||
It("should return pending", func() {
|
||||
state := getBeforeSuite()
|
||||
Ω(state).Should(Equal(types.RemoteBeforeSuiteData{nil, types.RemoteBeforeSuiteStatePending}))
|
||||
|
||||
state = getBeforeSuite()
|
||||
Ω(state).Should(Equal(types.RemoteBeforeSuiteData{nil, types.RemoteBeforeSuiteStatePending}))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the first node is Alive but has not responded yet", func() {
|
||||
BeforeEach(func() {
|
||||
server.RegisterAlive(1, func() bool {
|
||||
return true
|
||||
})
|
||||
})
|
||||
|
||||
It("should return pending", func() {
|
||||
state := getBeforeSuite()
|
||||
Ω(state).Should(Equal(types.RemoteBeforeSuiteData{nil, types.RemoteBeforeSuiteStatePending}))
|
||||
|
||||
state = getBeforeSuite()
|
||||
Ω(state).Should(Equal(types.RemoteBeforeSuiteData{nil, types.RemoteBeforeSuiteStatePending}))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the first node has responded", func() {
|
||||
var state types.RemoteBeforeSuiteData
|
||||
BeforeEach(func() {
|
||||
server.RegisterAlive(1, func() bool {
|
||||
return false
|
||||
})
|
||||
|
||||
state = types.RemoteBeforeSuiteData{
|
||||
Data: []byte("my data"),
|
||||
State: types.RemoteBeforeSuiteStatePassed,
|
||||
}
|
||||
postBeforeSuite(state)
|
||||
})
|
||||
|
||||
It("should return the passed in state", func() {
|
||||
returnedState := getBeforeSuite()
|
||||
Ω(returnedState).Should(Equal(state))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the first node is no longer Alive and has not responded yet", func() {
|
||||
BeforeEach(func() {
|
||||
server.RegisterAlive(1, func() bool {
|
||||
return false
|
||||
})
|
||||
})
|
||||
|
||||
It("should return disappeared", func() {
|
||||
state := getBeforeSuite()
|
||||
Ω(state).Should(Equal(types.RemoteBeforeSuiteData{nil, types.RemoteBeforeSuiteStateDisappeared}))
|
||||
|
||||
state = getBeforeSuite()
|
||||
Ω(state).Should(Equal(types.RemoteBeforeSuiteData{nil, types.RemoteBeforeSuiteStateDisappeared}))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("GETting RemoteAfterSuiteData", func() {
|
||||
getRemoteAfterSuiteData := func() bool {
|
||||
resp, err := http.Get(server.Address() + "/RemoteAfterSuiteData")
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(resp.StatusCode).Should(Equal(http.StatusOK))
|
||||
|
||||
a := types.RemoteAfterSuiteData{}
|
||||
decoder := json.NewDecoder(resp.Body)
|
||||
err = decoder.Decode(&a)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
return a.CanRun
|
||||
}
|
||||
|
||||
Context("when there are unregistered nodes", func() {
|
||||
BeforeEach(func() {
|
||||
server.RegisterAlive(2, func() bool {
|
||||
return false
|
||||
})
|
||||
})
|
||||
|
||||
It("should return false", func() {
|
||||
Ω(getRemoteAfterSuiteData()).Should(BeFalse())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when all none-node-1 nodes are still running", func() {
|
||||
BeforeEach(func() {
|
||||
server.RegisterAlive(2, func() bool {
|
||||
return true
|
||||
})
|
||||
|
||||
server.RegisterAlive(3, func() bool {
|
||||
return false
|
||||
})
|
||||
})
|
||||
|
||||
It("should return false", func() {
|
||||
Ω(getRemoteAfterSuiteData()).Should(BeFalse())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when all none-1 nodes are done", func() {
|
||||
BeforeEach(func() {
|
||||
server.RegisterAlive(2, func() bool {
|
||||
return false
|
||||
})
|
||||
|
||||
server.RegisterAlive(3, func() bool {
|
||||
return false
|
||||
})
|
||||
})
|
||||
|
||||
It("should return true", func() {
|
||||
Ω(getRemoteAfterSuiteData()).Should(BeTrue())
|
||||
})
|
||||
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
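The synchronization endpoints above are plain JSON over HTTP, so a parallel node can poll them with the standard library much as the test helpers do. A minimal sketch of polling node 1's BeforeSuite state (the server address is illustrative):

package main // illustrative standalone sketch

import (
	"encoding/json"
	"fmt"
	"net/http"

	"github.com/onsi/ginkgo/types"
)

func main() {
	// Ask the Ginkgo server whether node 1 has finished its BeforeSuite.
	resp, err := http.Get("http://127.0.0.1:7788/BeforeSuiteState") // address is illustrative
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var state types.RemoteBeforeSuiteData
	if err := json.NewDecoder(resp.Body).Decode(&state); err != nil {
		panic(err)
	}
	fmt.Printf("node 1 BeforeSuite state: %+v\n", state)
}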
149
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/spec/index_computer_test.go
generated
vendored
@@ -1,149 +0,0 @@
|
||||
package spec_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/ginkgo/internal/spec"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("ParallelizedIndexRange", func() {
|
||||
var startIndex, count int
|
||||
|
||||
It("should return the correct index range for 4 tests on 2 nodes", func() {
|
||||
startIndex, count = ParallelizedIndexRange(4, 2, 1)
|
||||
Ω(startIndex).Should(Equal(0))
|
||||
Ω(count).Should(Equal(2))
|
||||
|
||||
startIndex, count = ParallelizedIndexRange(4, 2, 2)
|
||||
Ω(startIndex).Should(Equal(2))
|
||||
Ω(count).Should(Equal(2))
|
||||
})
|
||||
|
||||
It("should return the correct index range for 5 tests on 2 nodes", func() {
|
||||
startIndex, count = ParallelizedIndexRange(5, 2, 1)
|
||||
Ω(startIndex).Should(Equal(0))
|
||||
Ω(count).Should(Equal(3))
|
||||
|
||||
startIndex, count = ParallelizedIndexRange(5, 2, 2)
|
||||
Ω(startIndex).Should(Equal(3))
|
||||
Ω(count).Should(Equal(2))
|
||||
})
|
||||
|
||||
It("should return the correct index range for 5 tests on 3 nodes", func() {
|
||||
startIndex, count = ParallelizedIndexRange(5, 3, 1)
|
||||
Ω(startIndex).Should(Equal(0))
|
||||
Ω(count).Should(Equal(2))
|
||||
|
||||
startIndex, count = ParallelizedIndexRange(5, 3, 2)
|
||||
Ω(startIndex).Should(Equal(2))
|
||||
Ω(count).Should(Equal(2))
|
||||
|
||||
startIndex, count = ParallelizedIndexRange(5, 3, 3)
|
||||
Ω(startIndex).Should(Equal(4))
|
||||
Ω(count).Should(Equal(1))
|
||||
})
|
||||
|
||||
It("should return the correct index range for 5 tests on 4 nodes", func() {
|
||||
startIndex, count = ParallelizedIndexRange(5, 4, 1)
|
||||
Ω(startIndex).Should(Equal(0))
|
||||
Ω(count).Should(Equal(2))
|
||||
|
||||
startIndex, count = ParallelizedIndexRange(5, 4, 2)
|
||||
Ω(startIndex).Should(Equal(2))
|
||||
Ω(count).Should(Equal(1))
|
||||
|
||||
startIndex, count = ParallelizedIndexRange(5, 4, 3)
|
||||
Ω(startIndex).Should(Equal(3))
|
||||
Ω(count).Should(Equal(1))
|
||||
|
||||
startIndex, count = ParallelizedIndexRange(5, 4, 4)
|
||||
Ω(startIndex).Should(Equal(4))
|
||||
Ω(count).Should(Equal(1))
|
||||
})
|
||||
|
||||
It("should return the correct index range for 5 tests on 5 nodes", func() {
|
||||
startIndex, count = ParallelizedIndexRange(5, 5, 1)
|
||||
Ω(startIndex).Should(Equal(0))
|
||||
Ω(count).Should(Equal(1))
|
||||
|
||||
startIndex, count = ParallelizedIndexRange(5, 5, 2)
|
||||
Ω(startIndex).Should(Equal(1))
|
||||
Ω(count).Should(Equal(1))
|
||||
|
||||
startIndex, count = ParallelizedIndexRange(5, 5, 3)
|
||||
Ω(startIndex).Should(Equal(2))
|
||||
Ω(count).Should(Equal(1))
|
||||
|
||||
startIndex, count = ParallelizedIndexRange(5, 5, 4)
|
||||
Ω(startIndex).Should(Equal(3))
|
||||
Ω(count).Should(Equal(1))
|
||||
|
||||
startIndex, count = ParallelizedIndexRange(5, 5, 5)
|
||||
Ω(startIndex).Should(Equal(4))
|
||||
Ω(count).Should(Equal(1))
|
||||
})
|
||||
|
||||
It("should return the correct index range for 5 tests on 6 nodes", func() {
|
||||
startIndex, count = ParallelizedIndexRange(5, 6, 1)
|
||||
Ω(startIndex).Should(Equal(0))
|
||||
Ω(count).Should(Equal(1))
|
||||
|
||||
startIndex, count = ParallelizedIndexRange(5, 6, 2)
|
||||
Ω(startIndex).Should(Equal(1))
|
||||
Ω(count).Should(Equal(1))
|
||||
|
||||
startIndex, count = ParallelizedIndexRange(5, 6, 3)
|
||||
Ω(startIndex).Should(Equal(2))
|
||||
Ω(count).Should(Equal(1))
|
||||
|
||||
startIndex, count = ParallelizedIndexRange(5, 6, 4)
|
||||
Ω(startIndex).Should(Equal(3))
|
||||
Ω(count).Should(Equal(1))
|
||||
|
||||
startIndex, count = ParallelizedIndexRange(5, 6, 5)
|
||||
Ω(startIndex).Should(Equal(4))
|
||||
Ω(count).Should(Equal(1))
|
||||
|
||||
startIndex, count = ParallelizedIndexRange(5, 6, 6)
|
||||
Ω(count).Should(Equal(0))
|
||||
})
|
||||
|
||||
It("should return the correct index range for 5 tests on 7 nodes", func() {
|
||||
startIndex, count = ParallelizedIndexRange(5, 7, 6)
|
||||
Ω(count).Should(Equal(0))
|
||||
|
||||
startIndex, count = ParallelizedIndexRange(5, 7, 7)
|
||||
Ω(count).Should(Equal(0))
|
||||
})
|
||||
|
||||
It("should return the correct index range for 11 tests on 7 nodes", func() {
|
||||
startIndex, count = ParallelizedIndexRange(11, 7, 1)
|
||||
Ω(startIndex).Should(Equal(0))
|
||||
Ω(count).Should(Equal(2))
|
||||
|
||||
startIndex, count = ParallelizedIndexRange(11, 7, 2)
|
||||
Ω(startIndex).Should(Equal(2))
|
||||
Ω(count).Should(Equal(2))
|
||||
|
||||
startIndex, count = ParallelizedIndexRange(11, 7, 3)
|
||||
Ω(startIndex).Should(Equal(4))
|
||||
Ω(count).Should(Equal(2))
|
||||
|
||||
startIndex, count = ParallelizedIndexRange(11, 7, 4)
|
||||
Ω(startIndex).Should(Equal(6))
|
||||
Ω(count).Should(Equal(2))
|
||||
|
||||
startIndex, count = ParallelizedIndexRange(11, 7, 5)
|
||||
Ω(startIndex).Should(Equal(8))
|
||||
Ω(count).Should(Equal(1))
|
||||
|
||||
startIndex, count = ParallelizedIndexRange(11, 7, 6)
|
||||
Ω(startIndex).Should(Equal(9))
|
||||
Ω(count).Should(Equal(1))
|
||||
|
||||
startIndex, count = ParallelizedIndexRange(11, 7, 7)
|
||||
Ω(startIndex).Should(Equal(10))
|
||||
Ω(count).Should(Equal(1))
|
||||
})
|
||||
|
||||
})
|
||||
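The index ranges asserted above fully determine how specs are divided across parallel nodes: the first (length % parallelTotal) nodes each take one spec more than the rest, and surplus nodes get nothing. A minimal Go sketch consistent with those assertions (illustrative only, not the vendored implementation; the package and function names here are made up) is:

package indexsketch

// parallelizedIndexRangeSketch reproduces the ranges asserted in
// index_computer_test.go above. parallelNode is 1-based; the first
// (length % parallelTotal) nodes each carry one extra spec.
func parallelizedIndexRangeSketch(length, parallelTotal, parallelNode int) (startIndex, count int) {
	if length == 0 || parallelNode > length {
		return 0, 0 // more nodes than specs: surplus nodes run nothing
	}
	if parallelTotal >= length {
		return parallelNode - 1, 1 // exactly one spec per node for the first `length` nodes
	}
	minPerNode := length / parallelTotal // floor share
	maxPerNode := minPerNode + 1         // share for the "heavy" nodes
	heavyNodes := length % parallelTotal // how many nodes carry the extra spec
	if parallelNode <= heavyNodes {
		return (parallelNode - 1) * maxPerNode, maxPerNode
	}
	return heavyNodes*maxPerNode + (parallelNode-1-heavyNodes)*minPerNode, minPerNode
}

For example, 11 specs on 7 nodes gives the first four nodes two specs each (start indices 0, 2, 4, 6) and the last three one spec each (8, 9, 10), matching the expectations above.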
13
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/spec/spec_suite_test.go
generated
vendored
@@ -1,13 +0,0 @@
|
||||
package spec_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestSpec(t *testing.T) {
|
||||
RegisterFailHandler(Fail)
|
||||
RunSpecs(t, "Spec Suite")
|
||||
}
|
||||
626
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/spec/spec_test.go
generated
vendored
@@ -1,626 +0,0 @@
|
||||
package spec_test
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
"github.com/onsi/gomega/gbytes"
|
||||
|
||||
. "github.com/onsi/ginkgo/internal/spec"
|
||||
|
||||
"github.com/onsi/ginkgo/internal/codelocation"
|
||||
"github.com/onsi/ginkgo/internal/containernode"
|
||||
Failer "github.com/onsi/ginkgo/internal/failer"
|
||||
"github.com/onsi/ginkgo/internal/leafnodes"
|
||||
"github.com/onsi/ginkgo/types"
|
||||
)
|
||||
|
||||
var noneFlag = types.FlagTypeNone
|
||||
var focusedFlag = types.FlagTypeFocused
|
||||
var pendingFlag = types.FlagTypePending
|
||||
|
||||
var _ = Describe("Spec", func() {
|
||||
var (
|
||||
failer *Failer.Failer
|
||||
codeLocation types.CodeLocation
|
||||
nodesThatRan []string
|
||||
spec *Spec
|
||||
buffer *gbytes.Buffer
|
||||
)
|
||||
|
||||
newBody := func(text string, fail bool) func() {
|
||||
return func() {
|
||||
nodesThatRan = append(nodesThatRan, text)
|
||||
if fail {
|
||||
failer.Fail(text, codeLocation)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
newIt := func(text string, flag types.FlagType, fail bool) *leafnodes.ItNode {
|
||||
return leafnodes.NewItNode(text, newBody(text, fail), flag, codeLocation, 0, failer, 0)
|
||||
}
|
||||
|
||||
newItWithBody := func(text string, body interface{}) *leafnodes.ItNode {
|
||||
return leafnodes.NewItNode(text, body, noneFlag, codeLocation, 0, failer, 0)
|
||||
}
|
||||
|
||||
newMeasure := func(text string, flag types.FlagType, fail bool, samples int) *leafnodes.MeasureNode {
|
||||
return leafnodes.NewMeasureNode(text, func(Benchmarker) {
|
||||
nodesThatRan = append(nodesThatRan, text)
|
||||
if fail {
|
||||
failer.Fail(text, codeLocation)
|
||||
}
|
||||
}, flag, codeLocation, samples, failer, 0)
|
||||
}
|
||||
|
||||
newBef := func(text string, fail bool) leafnodes.BasicNode {
|
||||
return leafnodes.NewBeforeEachNode(newBody(text, fail), codeLocation, 0, failer, 0)
|
||||
}
|
||||
|
||||
newAft := func(text string, fail bool) leafnodes.BasicNode {
|
||||
return leafnodes.NewAfterEachNode(newBody(text, fail), codeLocation, 0, failer, 0)
|
||||
}
|
||||
|
||||
newJusBef := func(text string, fail bool) leafnodes.BasicNode {
|
||||
return leafnodes.NewJustBeforeEachNode(newBody(text, fail), codeLocation, 0, failer, 0)
|
||||
}
|
||||
|
||||
newContainer := func(text string, flag types.FlagType, setupNodes ...leafnodes.BasicNode) *containernode.ContainerNode {
|
||||
c := containernode.New(text, flag, codeLocation)
|
||||
for _, node := range setupNodes {
|
||||
c.PushSetupNode(node)
|
||||
}
|
||||
return c
|
||||
}
|
||||
|
||||
containers := func(containers ...*containernode.ContainerNode) []*containernode.ContainerNode {
|
||||
return containers
|
||||
}
|
||||
|
||||
BeforeEach(func() {
|
||||
buffer = gbytes.NewBuffer()
|
||||
failer = Failer.New()
|
||||
codeLocation = codelocation.New(0)
|
||||
nodesThatRan = []string{}
|
||||
})
|
||||
|
||||
Describe("marking specs focused and pending", func() {
|
||||
It("should satisfy various caes", func() {
|
||||
cases := []struct {
|
||||
ContainerFlags []types.FlagType
|
||||
SubjectFlag types.FlagType
|
||||
Pending bool
|
||||
Focused bool
|
||||
}{
|
||||
{[]types.FlagType{}, noneFlag, false, false},
|
||||
{[]types.FlagType{}, focusedFlag, false, true},
|
||||
{[]types.FlagType{}, pendingFlag, true, false},
|
||||
{[]types.FlagType{noneFlag}, noneFlag, false, false},
|
||||
{[]types.FlagType{focusedFlag}, noneFlag, false, true},
|
||||
{[]types.FlagType{pendingFlag}, noneFlag, true, false},
|
||||
{[]types.FlagType{noneFlag}, focusedFlag, false, true},
|
||||
{[]types.FlagType{focusedFlag}, focusedFlag, false, true},
|
||||
{[]types.FlagType{pendingFlag}, focusedFlag, true, true},
|
||||
{[]types.FlagType{noneFlag}, pendingFlag, true, false},
|
||||
{[]types.FlagType{focusedFlag}, pendingFlag, true, true},
|
||||
{[]types.FlagType{pendingFlag}, pendingFlag, true, false},
|
||||
{[]types.FlagType{focusedFlag, noneFlag}, noneFlag, false, true},
|
||||
{[]types.FlagType{noneFlag, focusedFlag}, noneFlag, false, true},
|
||||
{[]types.FlagType{pendingFlag, noneFlag}, noneFlag, true, false},
|
||||
{[]types.FlagType{noneFlag, pendingFlag}, noneFlag, true, false},
|
||||
{[]types.FlagType{focusedFlag, pendingFlag}, noneFlag, true, true},
|
||||
}
|
||||
|
||||
for i, c := range cases {
|
||||
subject := newIt("it node", c.SubjectFlag, false)
|
||||
containers := []*containernode.ContainerNode{}
|
||||
for _, flag := range c.ContainerFlags {
|
||||
containers = append(containers, newContainer("container", flag))
|
||||
}
|
||||
|
||||
spec := New(subject, containers, false)
|
||||
Ω(spec.Pending()).Should(Equal(c.Pending), "Case %d: %#v", i, c)
|
||||
Ω(spec.Focused()).Should(Equal(c.Focused), "Case %d: %#v", i, c)
|
||||
|
||||
if c.Pending {
|
||||
Ω(spec.Summary("").State).Should(Equal(types.SpecStatePending))
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Skip", func() {
|
||||
It("should be skipped", func() {
|
||||
spec := New(newIt("it node", noneFlag, false), containers(newContainer("container", noneFlag)), false)
|
||||
Ω(spec.Skipped()).Should(BeFalse())
|
||||
spec.Skip()
|
||||
Ω(spec.Skipped()).Should(BeTrue())
|
||||
Ω(spec.Summary("").State).Should(Equal(types.SpecStateSkipped))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("IsMeasurement", func() {
|
||||
It("should be true if the subject is a measurement node", func() {
|
||||
spec := New(newIt("it node", noneFlag, false), containers(newContainer("container", noneFlag)), false)
|
||||
Ω(spec.IsMeasurement()).Should(BeFalse())
|
||||
Ω(spec.Summary("").IsMeasurement).Should(BeFalse())
|
||||
Ω(spec.Summary("").NumberOfSamples).Should(Equal(1))
|
||||
|
||||
spec = New(newMeasure("measure node", noneFlag, false, 10), containers(newContainer("container", noneFlag)), false)
|
||||
Ω(spec.IsMeasurement()).Should(BeTrue())
|
||||
Ω(spec.Summary("").IsMeasurement).Should(BeTrue())
|
||||
Ω(spec.Summary("").NumberOfSamples).Should(Equal(10))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Passed", func() {
|
||||
It("should pass when the subject passed", func() {
|
||||
spec := New(newIt("it node", noneFlag, false), containers(), false)
|
||||
spec.Run(buffer)
|
||||
|
||||
Ω(spec.Passed()).Should(BeTrue())
|
||||
Ω(spec.Failed()).Should(BeFalse())
|
||||
Ω(spec.Summary("").State).Should(Equal(types.SpecStatePassed))
|
||||
Ω(spec.Summary("").Failure).Should(BeZero())
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Failed", func() {
|
||||
It("should be failed if the failure was panic", func() {
|
||||
spec := New(newItWithBody("panicky it", func() {
|
||||
panic("bam")
|
||||
}), containers(), false)
|
||||
spec.Run(buffer)
|
||||
Ω(spec.Passed()).Should(BeFalse())
|
||||
Ω(spec.Failed()).Should(BeTrue())
|
||||
Ω(spec.Summary("").State).Should(Equal(types.SpecStatePanicked))
|
||||
Ω(spec.Summary("").Failure.Message).Should(Equal("Test Panicked"))
|
||||
Ω(spec.Summary("").Failure.ForwardedPanic).Should(Equal("bam"))
|
||||
})
|
||||
|
||||
It("should be failed if the failure was a timeout", func() {
|
||||
spec := New(newItWithBody("sleepy it", func(done Done) {}), containers(), false)
|
||||
spec.Run(buffer)
|
||||
Ω(spec.Passed()).Should(BeFalse())
|
||||
Ω(spec.Failed()).Should(BeTrue())
|
||||
Ω(spec.Summary("").State).Should(Equal(types.SpecStateTimedOut))
|
||||
Ω(spec.Summary("").Failure.Message).Should(Equal("Timed out"))
|
||||
})
|
||||
|
||||
It("should be failed if the failure was... a failure", func() {
|
||||
spec := New(newItWithBody("failing it", func() {
|
||||
failer.Fail("bam", codeLocation)
|
||||
}), containers(), false)
|
||||
spec.Run(buffer)
|
||||
Ω(spec.Passed()).Should(BeFalse())
|
||||
Ω(spec.Failed()).Should(BeTrue())
|
||||
Ω(spec.Summary("").State).Should(Equal(types.SpecStateFailed))
|
||||
Ω(spec.Summary("").Failure.Message).Should(Equal("bam"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Concatenated string", func() {
|
||||
It("should concatenate the texts of the containers and the subject", func() {
|
||||
spec := New(
|
||||
newIt("it node", noneFlag, false),
|
||||
containers(
|
||||
newContainer("outer container", noneFlag),
|
||||
newContainer("inner container", noneFlag),
|
||||
),
|
||||
false,
|
||||
)
|
||||
|
||||
Ω(spec.ConcatenatedString()).Should(Equal("outer container inner container it node"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("running it specs", func() {
|
||||
Context("with just an it", func() {
|
||||
Context("that succeeds", func() {
|
||||
It("should run the it and report on its success", func() {
|
||||
spec := New(newIt("it node", noneFlag, false), containers(), false)
|
||||
spec.Run(buffer)
|
||||
Ω(spec.Passed()).Should(BeTrue())
|
||||
Ω(spec.Failed()).Should(BeFalse())
|
||||
Ω(nodesThatRan).Should(Equal([]string{"it node"}))
|
||||
})
|
||||
})
|
||||
|
||||
Context("that fails", func() {
|
||||
It("should run the it and report on its success", func() {
|
||||
spec := New(newIt("it node", noneFlag, true), containers(), false)
|
||||
spec.Run(buffer)
|
||||
Ω(spec.Passed()).Should(BeFalse())
|
||||
Ω(spec.Failed()).Should(BeTrue())
|
||||
Ω(spec.Summary("").Failure.Message).Should(Equal("it node"))
|
||||
Ω(nodesThatRan).Should(Equal([]string{"it node"}))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Context("with a full set of setup nodes", func() {
|
||||
var failingNodes map[string]bool
|
||||
|
||||
BeforeEach(func() {
|
||||
failingNodes = map[string]bool{}
|
||||
})
|
||||
|
||||
JustBeforeEach(func() {
|
||||
spec = New(
|
||||
newIt("it node", noneFlag, failingNodes["it node"]),
|
||||
containers(
|
||||
newContainer("outer container", noneFlag,
|
||||
newBef("outer bef A", failingNodes["outer bef A"]),
|
||||
newBef("outer bef B", failingNodes["outer bef B"]),
|
||||
newJusBef("outer jusbef A", failingNodes["outer jusbef A"]),
|
||||
newJusBef("outer jusbef B", failingNodes["outer jusbef B"]),
|
||||
newAft("outer aft A", failingNodes["outer aft A"]),
|
||||
newAft("outer aft B", failingNodes["outer aft B"]),
|
||||
),
|
||||
newContainer("inner container", noneFlag,
|
||||
newBef("inner bef A", failingNodes["inner bef A"]),
|
||||
newBef("inner bef B", failingNodes["inner bef B"]),
|
||||
newJusBef("inner jusbef A", failingNodes["inner jusbef A"]),
|
||||
newJusBef("inner jusbef B", failingNodes["inner jusbef B"]),
|
||||
newAft("inner aft A", failingNodes["inner aft A"]),
|
||||
newAft("inner aft B", failingNodes["inner aft B"]),
|
||||
),
|
||||
),
|
||||
false,
|
||||
)
|
||||
spec.Run(buffer)
|
||||
})
|
||||
|
||||
Context("that all pass", func() {
|
||||
It("should walk through the nodes in the correct order", func() {
|
||||
Ω(spec.Passed()).Should(BeTrue())
|
||||
Ω(spec.Failed()).Should(BeFalse())
|
||||
Ω(nodesThatRan).Should(Equal([]string{
|
||||
"outer bef A",
|
||||
"outer bef B",
|
||||
"inner bef A",
|
||||
"inner bef B",
|
||||
"outer jusbef A",
|
||||
"outer jusbef B",
|
||||
"inner jusbef A",
|
||||
"inner jusbef B",
|
||||
"it node",
|
||||
"inner aft A",
|
||||
"inner aft B",
|
||||
"outer aft A",
|
||||
"outer aft B",
|
||||
}))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the subject fails", func() {
|
||||
BeforeEach(func() {
|
||||
failingNodes["it node"] = true
|
||||
})
|
||||
|
||||
It("should run the afters", func() {
|
||||
Ω(spec.Passed()).Should(BeFalse())
|
||||
Ω(spec.Failed()).Should(BeTrue())
|
||||
Ω(nodesThatRan).Should(Equal([]string{
|
||||
"outer bef A",
|
||||
"outer bef B",
|
||||
"inner bef A",
|
||||
"inner bef B",
|
||||
"outer jusbef A",
|
||||
"outer jusbef B",
|
||||
"inner jusbef A",
|
||||
"inner jusbef B",
|
||||
"it node",
|
||||
"inner aft A",
|
||||
"inner aft B",
|
||||
"outer aft A",
|
||||
"outer aft B",
|
||||
}))
|
||||
Ω(spec.Summary("").Failure.Message).Should(Equal("it node"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when an inner before fails", func() {
|
||||
BeforeEach(func() {
|
||||
failingNodes["inner bef A"] = true
|
||||
})
|
||||
|
||||
It("should not run any other befores, but it should run the subsequent afters", func() {
|
||||
Ω(spec.Passed()).Should(BeFalse())
|
||||
Ω(spec.Failed()).Should(BeTrue())
|
||||
Ω(nodesThatRan).Should(Equal([]string{
|
||||
"outer bef A",
|
||||
"outer bef B",
|
||||
"inner bef A",
|
||||
"inner aft A",
|
||||
"inner aft B",
|
||||
"outer aft A",
|
||||
"outer aft B",
|
||||
}))
|
||||
Ω(spec.Summary("").Failure.Message).Should(Equal("inner bef A"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when an outer before fails", func() {
|
||||
BeforeEach(func() {
|
||||
failingNodes["outer bef B"] = true
|
||||
})
|
||||
|
||||
It("should not run any other befores, but it should run the subsequent afters", func() {
|
||||
Ω(spec.Passed()).Should(BeFalse())
|
||||
Ω(spec.Failed()).Should(BeTrue())
|
||||
Ω(nodesThatRan).Should(Equal([]string{
|
||||
"outer bef A",
|
||||
"outer bef B",
|
||||
"outer aft A",
|
||||
"outer aft B",
|
||||
}))
|
||||
Ω(spec.Summary("").Failure.Message).Should(Equal("outer bef B"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when an after fails", func() {
|
||||
BeforeEach(func() {
|
||||
failingNodes["inner aft B"] = true
|
||||
})
|
||||
|
||||
It("should run all other afters, but mark the test as failed", func() {
|
||||
Ω(spec.Passed()).Should(BeFalse())
|
||||
Ω(spec.Failed()).Should(BeTrue())
|
||||
Ω(nodesThatRan).Should(Equal([]string{
|
||||
"outer bef A",
|
||||
"outer bef B",
|
||||
"inner bef A",
|
||||
"inner bef B",
|
||||
"outer jusbef A",
|
||||
"outer jusbef B",
|
||||
"inner jusbef A",
|
||||
"inner jusbef B",
|
||||
"it node",
|
||||
"inner aft A",
|
||||
"inner aft B",
|
||||
"outer aft A",
|
||||
"outer aft B",
|
||||
}))
|
||||
Ω(spec.Summary("").Failure.Message).Should(Equal("inner aft B"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when a just before each fails", func() {
|
||||
BeforeEach(func() {
|
||||
failingNodes["outer jusbef B"] = true
|
||||
})
|
||||
|
||||
It("should run the afters, but not the subject", func() {
|
||||
Ω(spec.Passed()).Should(BeFalse())
|
||||
Ω(spec.Failed()).Should(BeTrue())
|
||||
Ω(nodesThatRan).Should(Equal([]string{
|
||||
"outer bef A",
|
||||
"outer bef B",
|
||||
"inner bef A",
|
||||
"inner bef B",
|
||||
"outer jusbef A",
|
||||
"outer jusbef B",
|
||||
"inner aft A",
|
||||
"inner aft B",
|
||||
"outer aft A",
|
||||
"outer aft B",
|
||||
}))
|
||||
Ω(spec.Summary("").Failure.Message).Should(Equal("outer jusbef B"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when an after fails after an earlier node has failed", func() {
|
||||
BeforeEach(func() {
|
||||
failingNodes["it node"] = true
|
||||
failingNodes["inner aft B"] = true
|
||||
})
|
||||
|
||||
It("should record the earlier failure", func() {
|
||||
Ω(spec.Passed()).Should(BeFalse())
|
||||
Ω(spec.Failed()).Should(BeTrue())
|
||||
Ω(nodesThatRan).Should(Equal([]string{
|
||||
"outer bef A",
|
||||
"outer bef B",
|
||||
"inner bef A",
|
||||
"inner bef B",
|
||||
"outer jusbef A",
|
||||
"outer jusbef B",
|
||||
"inner jusbef A",
|
||||
"inner jusbef B",
|
||||
"it node",
|
||||
"inner aft A",
|
||||
"inner aft B",
|
||||
"outer aft A",
|
||||
"outer aft B",
|
||||
}))
|
||||
Ω(spec.Summary("").Failure.Message).Should(Equal("it node"))
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("running measurement specs", func() {
|
||||
Context("when the measurement succeeds", func() {
|
||||
It("should run N samples", func() {
|
||||
spec = New(
|
||||
newMeasure("measure node", noneFlag, false, 3),
|
||||
containers(
|
||||
newContainer("container", noneFlag,
|
||||
newBef("bef A", false),
|
||||
newJusBef("jusbef A", false),
|
||||
newAft("aft A", false),
|
||||
),
|
||||
),
|
||||
false,
|
||||
)
|
||||
spec.Run(buffer)
|
||||
|
||||
Ω(spec.Passed()).Should(BeTrue())
|
||||
Ω(spec.Failed()).Should(BeFalse())
|
||||
Ω(nodesThatRan).Should(Equal([]string{
|
||||
"bef A",
|
||||
"jusbef A",
|
||||
"measure node",
|
||||
"aft A",
|
||||
"bef A",
|
||||
"jusbef A",
|
||||
"measure node",
|
||||
"aft A",
|
||||
"bef A",
|
||||
"jusbef A",
|
||||
"measure node",
|
||||
"aft A",
|
||||
}))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the measurement fails", func() {
|
||||
It("should bail after the failure occurs", func() {
|
||||
spec = New(
|
||||
newMeasure("measure node", noneFlag, true, 3),
|
||||
containers(
|
||||
newContainer("container", noneFlag,
|
||||
newBef("bef A", false),
|
||||
newJusBef("jusbef A", false),
|
||||
newAft("aft A", false),
|
||||
),
|
||||
),
|
||||
false,
|
||||
)
|
||||
spec.Run(buffer)
|
||||
|
||||
Ω(spec.Passed()).Should(BeFalse())
|
||||
Ω(spec.Failed()).Should(BeTrue())
|
||||
Ω(nodesThatRan).Should(Equal([]string{
|
||||
"bef A",
|
||||
"jusbef A",
|
||||
"measure node",
|
||||
"aft A",
|
||||
}))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Summary", func() {
|
||||
var (
|
||||
subjectCodeLocation types.CodeLocation
|
||||
outerContainerCodeLocation types.CodeLocation
|
||||
innerContainerCodeLocation types.CodeLocation
|
||||
summary *types.SpecSummary
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
subjectCodeLocation = codelocation.New(0)
|
||||
outerContainerCodeLocation = codelocation.New(0)
|
||||
innerContainerCodeLocation = codelocation.New(0)
|
||||
|
||||
spec = New(
|
||||
leafnodes.NewItNode("it node", func() {
|
||||
time.Sleep(10 * time.Millisecond)
|
||||
}, noneFlag, subjectCodeLocation, 0, failer, 0),
|
||||
containers(
|
||||
containernode.New("outer container", noneFlag, outerContainerCodeLocation),
|
||||
containernode.New("inner container", noneFlag, innerContainerCodeLocation),
|
||||
),
|
||||
false,
|
||||
)
|
||||
|
||||
spec.Run(buffer)
|
||||
Ω(spec.Passed()).Should(BeTrue())
|
||||
summary = spec.Summary("suite id")
|
||||
})
|
||||
|
||||
It("should have the suite id", func() {
|
||||
Ω(summary.SuiteID).Should(Equal("suite id"))
|
||||
})
|
||||
|
||||
It("should have the component texts and code locations", func() {
|
||||
Ω(summary.ComponentTexts).Should(Equal([]string{"outer container", "inner container", "it node"}))
|
||||
Ω(summary.ComponentCodeLocations).Should(Equal([]types.CodeLocation{outerContainerCodeLocation, innerContainerCodeLocation, subjectCodeLocation}))
|
||||
})
|
||||
|
||||
It("should have a runtime", func() {
|
||||
Ω(summary.RunTime).Should(BeNumerically(">=", 10*time.Millisecond))
|
||||
})
|
||||
|
||||
It("should not be a measurement, or have a measurement summary", func() {
|
||||
Ω(summary.IsMeasurement).Should(BeFalse())
|
||||
Ω(summary.Measurements).Should(BeEmpty())
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Summaries for measurements", func() {
|
||||
var summary *types.SpecSummary
|
||||
|
||||
BeforeEach(func() {
|
||||
spec = New(leafnodes.NewMeasureNode("measure node", func(b Benchmarker) {
|
||||
b.RecordValue("a value", 7, "some info")
|
||||
}, noneFlag, codeLocation, 4, failer, 0), containers(), false)
|
||||
spec.Run(buffer)
|
||||
Ω(spec.Passed()).Should(BeTrue())
|
||||
summary = spec.Summary("suite id")
|
||||
})
|
||||
|
||||
It("should include the number of samples", func() {
|
||||
Ω(summary.NumberOfSamples).Should(Equal(4))
|
||||
})
|
||||
|
||||
It("should be a measurement", func() {
|
||||
Ω(summary.IsMeasurement).Should(BeTrue())
|
||||
})
|
||||
|
||||
It("should have the measurements report", func() {
|
||||
Ω(summary.Measurements).Should(HaveKey("a value"))
|
||||
|
||||
report := summary.Measurements["a value"]
|
||||
Ω(report.Name).Should(Equal("a value"))
|
||||
Ω(report.Info).Should(Equal("some info"))
|
||||
Ω(report.Results).Should(Equal([]float64{7, 7, 7, 7}))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("When told to emit progress", func() {
|
||||
It("should emit progress to the writer as it runs Befores, JustBefores, Afters, and Its", func() {
|
||||
spec = New(
|
||||
newIt("it node", noneFlag, false),
|
||||
containers(
|
||||
newContainer("outer container", noneFlag,
|
||||
newBef("outer bef A", false),
|
||||
newJusBef("outer jusbef A", false),
|
||||
newAft("outer aft A", false),
|
||||
),
|
||||
newContainer("inner container", noneFlag,
|
||||
newBef("inner bef A", false),
|
||||
newJusBef("inner jusbef A", false),
|
||||
newAft("inner aft A", false),
|
||||
),
|
||||
),
|
||||
true,
|
||||
)
|
||||
spec.Run(buffer)
|
||||
|
||||
Ω(buffer).Should(gbytes.Say(`\[BeforeEach\] outer container`))
|
||||
Ω(buffer).Should(gbytes.Say(`\[BeforeEach\] inner container`))
|
||||
Ω(buffer).Should(gbytes.Say(`\[JustBeforeEach\] outer container`))
|
||||
Ω(buffer).Should(gbytes.Say(`\[JustBeforeEach\] inner container`))
|
||||
Ω(buffer).Should(gbytes.Say(`\[It\] it node`))
|
||||
Ω(buffer).Should(gbytes.Say(`\[AfterEach\] inner container`))
|
||||
Ω(buffer).Should(gbytes.Say(`\[AfterEach\] outer container`))
|
||||
})
|
||||
|
||||
It("should emit progress to the writer as it runs Befores, JustBefores, Afters, and Measures", func() {
|
||||
spec = New(
|
||||
newMeasure("measure node", noneFlag, false, 2),
|
||||
containers(),
|
||||
true,
|
||||
)
|
||||
spec.Run(buffer)
|
||||
|
||||
Ω(buffer).Should(gbytes.Say(`\[Measure\] measure node`))
|
||||
Ω(buffer).Should(gbytes.Say(`\[Measure\] measure node`))
|
||||
})
|
||||
})
|
||||
})
|
||||
305
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/spec/specs_test.go
generated
vendored
@@ -1,305 +0,0 @@
|
||||
package spec_test
|
||||
|
||||
import (
|
||||
"math/rand"
|
||||
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/ginkgo/internal/spec"
|
||||
. "github.com/onsi/gomega"
|
||||
|
||||
"github.com/onsi/ginkgo/internal/codelocation"
|
||||
"github.com/onsi/ginkgo/internal/containernode"
|
||||
"github.com/onsi/ginkgo/internal/leafnodes"
|
||||
"github.com/onsi/ginkgo/types"
|
||||
)
|
||||
|
||||
var _ = Describe("Specs", func() {
|
||||
var specs *Specs
|
||||
|
||||
newSpec := func(text string, flag types.FlagType) *Spec {
|
||||
subject := leafnodes.NewItNode(text, func() {}, flag, codelocation.New(0), 0, nil, 0)
|
||||
return New(subject, []*containernode.ContainerNode{}, false)
|
||||
}
|
||||
|
||||
newMeasureSpec := func(text string, flag types.FlagType) *Spec {
|
||||
subject := leafnodes.NewMeasureNode(text, func(Benchmarker) {}, flag, codelocation.New(0), 0, nil, 0)
|
||||
return New(subject, []*containernode.ContainerNode{}, false)
|
||||
}
|
||||
|
||||
newSpecs := func(args ...interface{}) *Specs {
|
||||
specs := []*Spec{}
|
||||
for index := 0; index < len(args)-1; index += 2 {
|
||||
specs = append(specs, newSpec(args[index].(string), args[index+1].(types.FlagType)))
|
||||
}
|
||||
return NewSpecs(specs)
|
||||
}
|
||||
|
||||
specTexts := func(specs *Specs) []string {
|
||||
texts := []string{}
|
||||
for _, spec := range specs.Specs() {
|
||||
texts = append(texts, spec.ConcatenatedString())
|
||||
}
|
||||
return texts
|
||||
}
|
||||
|
||||
willRunTexts := func(specs *Specs) []string {
|
||||
texts := []string{}
|
||||
for _, spec := range specs.Specs() {
|
||||
if !(spec.Skipped() || spec.Pending()) {
|
||||
texts = append(texts, spec.ConcatenatedString())
|
||||
}
|
||||
}
|
||||
return texts
|
||||
}
|
||||
|
||||
skippedTexts := func(specs *Specs) []string {
|
||||
texts := []string{}
|
||||
for _, spec := range specs.Specs() {
|
||||
if spec.Skipped() {
|
||||
texts = append(texts, spec.ConcatenatedString())
|
||||
}
|
||||
}
|
||||
return texts
|
||||
}
|
||||
|
||||
pendingTexts := func(specs *Specs) []string {
|
||||
texts := []string{}
|
||||
for _, spec := range specs.Specs() {
|
||||
if spec.Pending() {
|
||||
texts = append(texts, spec.ConcatenatedString())
|
||||
}
|
||||
}
|
||||
return texts
|
||||
}
|
||||
|
||||
Describe("Shuffling specs", func() {
|
||||
It("should shuffle the specs using the passed in randomizer", func() {
|
||||
specs17 := newSpecs("C", noneFlag, "A", noneFlag, "B", noneFlag)
|
||||
specs17.Shuffle(rand.New(rand.NewSource(17)))
|
||||
texts17 := specTexts(specs17)
|
||||
|
||||
specs17Again := newSpecs("C", noneFlag, "A", noneFlag, "B", noneFlag)
|
||||
specs17Again.Shuffle(rand.New(rand.NewSource(17)))
|
||||
texts17Again := specTexts(specs17Again)
|
||||
|
||||
specs15 := newSpecs("C", noneFlag, "A", noneFlag, "B", noneFlag)
|
||||
specs15.Shuffle(rand.New(rand.NewSource(15)))
|
||||
texts15 := specTexts(specs15)
|
||||
|
||||
specsUnshuffled := newSpecs("C", noneFlag, "A", noneFlag, "B", noneFlag)
|
||||
textsUnshuffled := specTexts(specsUnshuffled)
|
||||
|
||||
Ω(textsUnshuffled).Should(Equal([]string{"C", "A", "B"}))
|
||||
|
||||
Ω(texts17).Should(Equal(texts17Again))
|
||||
Ω(texts17).ShouldNot(Equal(texts15))
|
||||
Ω(texts17).ShouldNot(Equal(textsUnshuffled))
|
||||
Ω(texts15).ShouldNot(Equal(textsUnshuffled))
|
||||
|
||||
Ω(texts17).Should(HaveLen(3))
|
||||
Ω(texts17).Should(ContainElement("A"))
|
||||
Ω(texts17).Should(ContainElement("B"))
|
||||
Ω(texts17).Should(ContainElement("C"))
|
||||
|
||||
Ω(texts15).Should(HaveLen(3))
|
||||
Ω(texts15).Should(ContainElement("A"))
|
||||
Ω(texts15).Should(ContainElement("B"))
|
||||
Ω(texts15).Should(ContainElement("C"))
|
||||
})
|
||||
})
|
||||
|
||||
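The shuffle assertions above only require that the ordering be a deterministic function of the supplied *rand.Rand (same seed, same order; a different seed usually yields a different order) and that no spec be lost. A minimal sketch of such a shuffle (illustrative only, not the vendored code) is a Fisher-Yates pass driven by the caller's randomizer:

package shufflesketch

import "math/rand"

// shuffleSpecTexts permutes the slice in place using the caller's randomizer,
// so a fixed seed always produces the same order while every element is kept.
func shuffleSpecTexts(texts []string, r *rand.Rand) {
	for i := len(texts) - 1; i > 0; i-- {
		j := r.Intn(i + 1) // choose a position in [0, i]
		texts[i], texts[j] = texts[j], texts[i]
	}
}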
Describe("with no programmatic focus", func() {
|
||||
BeforeEach(func() {
|
||||
specs = newSpecs("A1", noneFlag, "A2", noneFlag, "B1", noneFlag, "B2", pendingFlag)
|
||||
specs.ApplyFocus("", "", "")
|
||||
})
|
||||
|
||||
It("should not report as having programmatic specs", func() {
|
||||
Ω(specs.HasProgrammaticFocus()).Should(BeFalse())
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Applying focus/skip", func() {
|
||||
var description, focusString, skipString string
|
||||
|
||||
BeforeEach(func() {
|
||||
description, focusString, skipString = "", "", ""
|
||||
})
|
||||
|
||||
JustBeforeEach(func() {
|
||||
specs = newSpecs("A1", focusedFlag, "A2", noneFlag, "B1", focusedFlag, "B2", pendingFlag)
|
||||
specs.ApplyFocus(description, focusString, skipString)
|
||||
})
|
||||
|
||||
Context("with neither a focus string nor a skip string", func() {
|
||||
It("should apply the programmatic focus", func() {
|
||||
Ω(willRunTexts(specs)).Should(Equal([]string{"A1", "B1"}))
|
||||
Ω(skippedTexts(specs)).Should(Equal([]string{"A2", "B2"}))
|
||||
Ω(pendingTexts(specs)).Should(BeEmpty())
|
||||
})
|
||||
|
||||
It("should report as having programmatic specs", func() {
|
||||
Ω(specs.HasProgrammaticFocus()).Should(BeTrue())
|
||||
})
|
||||
})
|
||||
|
||||
Context("with a focus regexp", func() {
|
||||
BeforeEach(func() {
|
||||
focusString = "A"
|
||||
})
|
||||
|
||||
It("should override the programmatic focus", func() {
|
||||
Ω(willRunTexts(specs)).Should(Equal([]string{"A1", "A2"}))
|
||||
Ω(skippedTexts(specs)).Should(Equal([]string{"B1", "B2"}))
|
||||
Ω(pendingTexts(specs)).Should(BeEmpty())
|
||||
})
|
||||
|
||||
It("should not report as having programmatic specs", func() {
|
||||
Ω(specs.HasProgrammaticFocus()).Should(BeFalse())
|
||||
})
|
||||
})
|
||||
|
||||
Context("with a focus regexp", func() {
|
||||
BeforeEach(func() {
|
||||
focusString = "B"
|
||||
})
|
||||
|
||||
It("should not override any pendings", func() {
|
||||
Ω(willRunTexts(specs)).Should(Equal([]string{"B1"}))
|
||||
Ω(skippedTexts(specs)).Should(Equal([]string{"A1", "A2"}))
|
||||
Ω(pendingTexts(specs)).Should(Equal([]string{"B2"}))
|
||||
})
|
||||
})
|
||||
|
||||
Context("with a description", func() {
|
||||
BeforeEach(func() {
|
||||
description = "C"
|
||||
focusString = "C"
|
||||
})
|
||||
|
||||
It("should include the description in the focus determination", func() {
|
||||
Ω(willRunTexts(specs)).Should(Equal([]string{"A1", "A2", "B1"}))
|
||||
Ω(skippedTexts(specs)).Should(BeEmpty())
|
||||
Ω(pendingTexts(specs)).Should(Equal([]string{"B2"}))
|
||||
})
|
||||
})
|
||||
|
||||
Context("with a description", func() {
|
||||
BeforeEach(func() {
|
||||
description = "C"
|
||||
skipString = "C"
|
||||
})
|
||||
|
||||
It("should include the description in the focus determination", func() {
|
||||
Ω(willRunTexts(specs)).Should(BeEmpty())
|
||||
Ω(skippedTexts(specs)).Should(Equal([]string{"A1", "A2", "B1", "B2"}))
|
||||
Ω(pendingTexts(specs)).Should(BeEmpty())
|
||||
})
|
||||
})
|
||||
|
||||
Context("with a skip regexp", func() {
|
||||
BeforeEach(func() {
|
||||
skipString = "A"
|
||||
})
|
||||
|
||||
It("should override the programmatic focus", func() {
|
||||
Ω(willRunTexts(specs)).Should(Equal([]string{"B1"}))
|
||||
Ω(skippedTexts(specs)).Should(Equal([]string{"A1", "A2"}))
|
||||
Ω(pendingTexts(specs)).Should(Equal([]string{"B2"}))
|
||||
})
|
||||
|
||||
It("should not report as having programmatic specs", func() {
|
||||
Ω(specs.HasProgrammaticFocus()).Should(BeFalse())
|
||||
})
|
||||
})
|
||||
|
||||
Context("with both a focus and a skip regexp", func() {
|
||||
BeforeEach(func() {
|
||||
focusString = "1"
|
||||
skipString = "B"
|
||||
})
|
||||
|
||||
It("should AND the two", func() {
|
||||
Ω(willRunTexts(specs)).Should(Equal([]string{"A1"}))
|
||||
Ω(skippedTexts(specs)).Should(Equal([]string{"A2", "B1", "B2"}))
|
||||
Ω(pendingTexts(specs)).Should(BeEmpty())
|
||||
})
|
||||
|
||||
It("should not report as having programmatic specs", func() {
|
||||
Ω(specs.HasProgrammaticFocus()).Should(BeFalse())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("skipping measurements", func() {
|
||||
BeforeEach(func() {
|
||||
specs = NewSpecs([]*Spec{
|
||||
newSpec("A", noneFlag),
|
||||
newSpec("B", noneFlag),
|
||||
newSpec("C", pendingFlag),
|
||||
newMeasureSpec("measurementA", noneFlag),
|
||||
newMeasureSpec("measurementB", pendingFlag),
|
||||
})
|
||||
})
|
||||
|
||||
It("should skip measurements", func() {
|
||||
Ω(willRunTexts(specs)).Should(Equal([]string{"A", "B", "measurementA"}))
|
||||
Ω(skippedTexts(specs)).Should(BeEmpty())
|
||||
Ω(pendingTexts(specs)).Should(Equal([]string{"C", "measurementB"}))
|
||||
|
||||
specs.SkipMeasurements()
|
||||
|
||||
Ω(willRunTexts(specs)).Should(Equal([]string{"A", "B"}))
|
||||
Ω(skippedTexts(specs)).Should(Equal([]string{"measurementA", "measurementB"}))
|
||||
Ω(pendingTexts(specs)).Should(Equal([]string{"C"}))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("when running tests in parallel", func() {
|
||||
It("should select out a subset of the tests", func() {
|
||||
specsNode1 := newSpecs("A", noneFlag, "B", noneFlag, "C", noneFlag, "D", noneFlag, "E", noneFlag)
|
||||
specsNode2 := newSpecs("A", noneFlag, "B", noneFlag, "C", noneFlag, "D", noneFlag, "E", noneFlag)
|
||||
specsNode3 := newSpecs("A", noneFlag, "B", noneFlag, "C", noneFlag, "D", noneFlag, "E", noneFlag)
|
||||
|
||||
specsNode1.TrimForParallelization(3, 1)
|
||||
specsNode2.TrimForParallelization(3, 2)
|
||||
specsNode3.TrimForParallelization(3, 3)
|
||||
|
||||
Ω(willRunTexts(specsNode1)).Should(Equal([]string{"A", "B"}))
|
||||
Ω(willRunTexts(specsNode2)).Should(Equal([]string{"C", "D"}))
|
||||
Ω(willRunTexts(specsNode3)).Should(Equal([]string{"E"}))
|
||||
|
||||
Ω(specsNode1.Specs()).Should(HaveLen(2))
|
||||
Ω(specsNode2.Specs()).Should(HaveLen(2))
|
||||
Ω(specsNode3.Specs()).Should(HaveLen(1))
|
||||
|
||||
Ω(specsNode1.NumberOfOriginalSpecs()).Should(Equal(5))
|
||||
Ω(specsNode2.NumberOfOriginalSpecs()).Should(Equal(5))
|
||||
Ω(specsNode3.NumberOfOriginalSpecs()).Should(Equal(5))
|
||||
})
|
||||
|
||||
Context("when way too many nodes are used", func() {
|
||||
It("should return 0 specs", func() {
|
||||
specsNode1 := newSpecs("A", noneFlag, "B", noneFlag)
|
||||
specsNode2 := newSpecs("A", noneFlag, "B", noneFlag)
|
||||
specsNode3 := newSpecs("A", noneFlag, "B", noneFlag)
|
||||
|
||||
specsNode1.TrimForParallelization(3, 1)
|
||||
specsNode2.TrimForParallelization(3, 2)
|
||||
specsNode3.TrimForParallelization(3, 3)
|
||||
|
||||
Ω(willRunTexts(specsNode1)).Should(Equal([]string{"A"}))
|
||||
Ω(willRunTexts(specsNode2)).Should(Equal([]string{"B"}))
|
||||
Ω(willRunTexts(specsNode3)).Should(BeEmpty())
|
||||
|
||||
Ω(specsNode1.Specs()).Should(HaveLen(1))
|
||||
Ω(specsNode2.Specs()).Should(HaveLen(1))
|
||||
Ω(specsNode3.Specs()).Should(HaveLen(0))
|
||||
|
||||
Ω(specsNode1.NumberOfOriginalSpecs()).Should(Equal(2))
|
||||
Ω(specsNode2.NumberOfOriginalSpecs()).Should(Equal(2))
|
||||
Ω(specsNode3.NumberOfOriginalSpecs()).Should(Equal(2))
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,13 +0,0 @@
|
||||
package specrunner_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestSpecRunner(t *testing.T) {
|
||||
RegisterFailHandler(Fail)
|
||||
RunSpecs(t, "Spec Runner Suite")
|
||||
}
|
||||
623
Godeps/_workspace/src/github.com/onsi/ginkgo/internal/specrunner/spec_runner_test.go
generated
vendored
@@ -1,623 +0,0 @@
|
||||
package specrunner_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/ginkgo/internal/specrunner"
|
||||
"github.com/onsi/ginkgo/types"
|
||||
. "github.com/onsi/gomega"
|
||||
|
||||
"github.com/onsi/ginkgo/config"
|
||||
"github.com/onsi/ginkgo/internal/codelocation"
|
||||
"github.com/onsi/ginkgo/internal/containernode"
|
||||
Failer "github.com/onsi/ginkgo/internal/failer"
|
||||
"github.com/onsi/ginkgo/internal/leafnodes"
|
||||
"github.com/onsi/ginkgo/internal/spec"
|
||||
Writer "github.com/onsi/ginkgo/internal/writer"
|
||||
"github.com/onsi/ginkgo/reporters"
|
||||
)
|
||||
|
||||
var noneFlag = types.FlagTypeNone
|
||||
var focusedFlag = types.FlagTypeFocused
|
||||
var pendingFlag = types.FlagTypePending
|
||||
|
||||
var _ = Describe("Spec Runner", func() {
|
||||
var (
|
||||
reporter1 *reporters.FakeReporter
|
||||
reporter2 *reporters.FakeReporter
|
||||
failer *Failer.Failer
|
||||
writer *Writer.FakeGinkgoWriter
|
||||
|
||||
thingsThatRan []string
|
||||
|
||||
runner *SpecRunner
|
||||
)
|
||||
|
||||
newBefSuite := func(text string, fail bool) leafnodes.SuiteNode {
|
||||
return leafnodes.NewBeforeSuiteNode(func() {
|
||||
writer.AddEvent(text)
|
||||
thingsThatRan = append(thingsThatRan, text)
|
||||
if fail {
|
||||
failer.Fail(text, codelocation.New(0))
|
||||
}
|
||||
}, codelocation.New(0), 0, failer)
|
||||
}
|
||||
|
||||
newAftSuite := func(text string, fail bool) leafnodes.SuiteNode {
|
||||
return leafnodes.NewAfterSuiteNode(func() {
|
||||
writer.AddEvent(text)
|
||||
thingsThatRan = append(thingsThatRan, text)
|
||||
if fail {
|
||||
failer.Fail(text, codelocation.New(0))
|
||||
}
|
||||
}, codelocation.New(0), 0, failer)
|
||||
}
|
||||
|
||||
newSpec := func(text string, flag types.FlagType, fail bool) *spec.Spec {
|
||||
subject := leafnodes.NewItNode(text, func() {
|
||||
writer.AddEvent(text)
|
||||
thingsThatRan = append(thingsThatRan, text)
|
||||
if fail {
|
||||
failer.Fail(text, codelocation.New(0))
|
||||
}
|
||||
}, flag, codelocation.New(0), 0, failer, 0)
|
||||
|
||||
return spec.New(subject, []*containernode.ContainerNode{}, false)
|
||||
}
|
||||
|
||||
newSpecWithBody := func(text string, body interface{}) *spec.Spec {
|
||||
subject := leafnodes.NewItNode(text, body, noneFlag, codelocation.New(0), 0, failer, 0)
|
||||
|
||||
return spec.New(subject, []*containernode.ContainerNode{}, false)
|
||||
}
|
||||
|
||||
newRunner := func(config config.GinkgoConfigType, beforeSuiteNode leafnodes.SuiteNode, afterSuiteNode leafnodes.SuiteNode, specs ...*spec.Spec) *SpecRunner {
|
||||
return New("description", beforeSuiteNode, spec.NewSpecs(specs), afterSuiteNode, []reporters.Reporter{reporter1, reporter2}, writer, config)
|
||||
}
|
||||
|
||||
BeforeEach(func() {
|
||||
reporter1 = reporters.NewFakeReporter()
|
||||
reporter2 = reporters.NewFakeReporter()
|
||||
writer = Writer.NewFake()
|
||||
failer = Failer.New()
|
||||
|
||||
thingsThatRan = []string{}
|
||||
})
|
||||
|
||||
Describe("Running and Reporting", func() {
|
||||
var specA, pendingSpec, anotherPendingSpec, failedSpec, specB, skippedSpec *spec.Spec
|
||||
var willRunCalls, didCompleteCalls []string
|
||||
var conf config.GinkgoConfigType
|
||||
|
||||
JustBeforeEach(func() {
|
||||
willRunCalls = []string{}
|
||||
didCompleteCalls = []string{}
|
||||
specA = newSpec("spec A", noneFlag, false)
|
||||
pendingSpec = newSpec("pending spec", pendingFlag, false)
|
||||
anotherPendingSpec = newSpec("another pending spec", pendingFlag, false)
|
||||
failedSpec = newSpec("failed spec", noneFlag, true)
|
||||
specB = newSpec("spec B", noneFlag, false)
|
||||
skippedSpec = newSpec("skipped spec", noneFlag, false)
|
||||
skippedSpec.Skip()
|
||||
|
||||
reporter1.SpecWillRunStub = func(specSummary *types.SpecSummary) {
|
||||
willRunCalls = append(willRunCalls, "Reporter1")
|
||||
}
|
||||
reporter2.SpecWillRunStub = func(specSummary *types.SpecSummary) {
|
||||
willRunCalls = append(willRunCalls, "Reporter2")
|
||||
}
|
||||
|
||||
reporter1.SpecDidCompleteStub = func(specSummary *types.SpecSummary) {
|
||||
didCompleteCalls = append(didCompleteCalls, "Reporter1")
|
||||
}
|
||||
reporter2.SpecDidCompleteStub = func(specSummary *types.SpecSummary) {
|
||||
didCompleteCalls = append(didCompleteCalls, "Reporter2")
|
||||
}
|
||||
|
||||
runner = newRunner(conf, newBefSuite("BefSuite", false), newAftSuite("AftSuite", false), specA, pendingSpec, anotherPendingSpec, failedSpec, specB, skippedSpec)
|
||||
runner.Run()
|
||||
})
|
||||
|
||||
BeforeEach(func() {
|
||||
conf = config.GinkgoConfigType{RandomSeed: 17}
|
||||
})
|
||||
|
||||
It("should skip skipped/pending tests", func() {
|
||||
Ω(thingsThatRan).Should(Equal([]string{"BefSuite", "spec A", "failed spec", "spec B", "AftSuite"}))
|
||||
})
|
||||
|
||||
It("should report to any attached reporters", func() {
|
||||
Ω(reporter1.Config).Should(Equal(reporter2.Config))
|
||||
Ω(reporter1.BeforeSuiteSummary).Should(Equal(reporter2.BeforeSuiteSummary))
|
||||
Ω(reporter1.BeginSummary).Should(Equal(reporter2.BeginSummary))
|
||||
Ω(reporter1.SpecWillRunSummaries).Should(Equal(reporter2.SpecWillRunSummaries))
|
||||
Ω(reporter1.SpecSummaries).Should(Equal(reporter2.SpecSummaries))
|
||||
Ω(reporter1.AfterSuiteSummary).Should(Equal(reporter2.AfterSuiteSummary))
|
||||
Ω(reporter1.EndSummary).Should(Equal(reporter2.EndSummary))
|
||||
})
|
||||
|
||||
It("should report that a spec did end in reverse order", func() {
|
||||
Ω(willRunCalls[0:4]).Should(Equal([]string{"Reporter1", "Reporter2", "Reporter1", "Reporter2"}))
|
||||
Ω(didCompleteCalls[0:4]).Should(Equal([]string{"Reporter2", "Reporter1", "Reporter2", "Reporter1"}))
|
||||
})
|
||||
|
||||
It("should report the passed in config", func() {
|
||||
Ω(reporter1.Config.RandomSeed).Should(BeNumerically("==", 17))
|
||||
})
|
||||
|
||||
It("should report the beginning of the suite", func() {
|
||||
Ω(reporter1.BeginSummary.SuiteDescription).Should(Equal("description"))
|
||||
Ω(reporter1.BeginSummary.SuiteID).Should(MatchRegexp("[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}"))
|
||||
Ω(reporter1.BeginSummary.NumberOfSpecsBeforeParallelization).Should(Equal(6))
|
||||
Ω(reporter1.BeginSummary.NumberOfTotalSpecs).Should(Equal(6))
|
||||
Ω(reporter1.BeginSummary.NumberOfSpecsThatWillBeRun).Should(Equal(3))
|
||||
Ω(reporter1.BeginSummary.NumberOfPendingSpecs).Should(Equal(2))
|
||||
Ω(reporter1.BeginSummary.NumberOfSkippedSpecs).Should(Equal(1))
|
||||
})
|
||||
|
||||
It("should report the end of the suite", func() {
|
||||
Ω(reporter1.EndSummary.SuiteDescription).Should(Equal("description"))
|
||||
Ω(reporter1.EndSummary.SuiteSucceeded).Should(BeFalse())
|
||||
Ω(reporter1.EndSummary.SuiteID).Should(MatchRegexp("[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}"))
|
||||
Ω(reporter1.EndSummary.NumberOfSpecsBeforeParallelization).Should(Equal(6))
|
||||
Ω(reporter1.EndSummary.NumberOfTotalSpecs).Should(Equal(6))
|
||||
Ω(reporter1.EndSummary.NumberOfSpecsThatWillBeRun).Should(Equal(3))
|
||||
Ω(reporter1.EndSummary.NumberOfPendingSpecs).Should(Equal(2))
|
||||
Ω(reporter1.EndSummary.NumberOfSkippedSpecs).Should(Equal(1))
|
||||
Ω(reporter1.EndSummary.NumberOfPassedSpecs).Should(Equal(2))
|
||||
Ω(reporter1.EndSummary.NumberOfFailedSpecs).Should(Equal(1))
|
||||
})
|
||||
|
||||
Context("when told to perform a dry run", func() {
|
||||
BeforeEach(func() {
|
||||
conf.DryRun = true
|
||||
})
|
||||
|
||||
It("should report to the reporters", func() {
|
||||
Ω(reporter1.Config).Should(Equal(reporter2.Config))
|
||||
Ω(reporter1.BeforeSuiteSummary).Should(Equal(reporter2.BeforeSuiteSummary))
|
||||
Ω(reporter1.BeginSummary).Should(Equal(reporter2.BeginSummary))
|
||||
Ω(reporter1.SpecWillRunSummaries).Should(Equal(reporter2.SpecWillRunSummaries))
|
||||
Ω(reporter1.SpecSummaries).Should(Equal(reporter2.SpecSummaries))
|
||||
Ω(reporter1.AfterSuiteSummary).Should(Equal(reporter2.AfterSuiteSummary))
|
||||
Ω(reporter1.EndSummary).Should(Equal(reporter2.EndSummary))
|
||||
})
|
||||
|
||||
It("should not actually run anything", func() {
|
||||
Ω(thingsThatRan).Should(BeEmpty())
|
||||
})
|
||||
|
||||
It("report before and after suites as passed", func() {
|
||||
Ω(reporter1.BeforeSuiteSummary.State).Should(Equal(types.SpecStatePassed))
|
||||
Ω(reporter1.AfterSuiteSummary.State).Should(Equal(types.SpecStatePassed))
|
||||
})
|
||||
|
||||
It("should report specs as passed", func() {
|
||||
summaries := reporter1.SpecSummaries
|
||||
Ω(summaries).Should(HaveLen(6))
|
||||
Ω(summaries[0].ComponentTexts).Should(ContainElement("spec A"))
|
||||
Ω(summaries[0].State).Should(Equal(types.SpecStatePassed))
|
||||
Ω(summaries[1].ComponentTexts).Should(ContainElement("pending spec"))
|
||||
Ω(summaries[1].State).Should(Equal(types.SpecStatePending))
|
||||
Ω(summaries[2].ComponentTexts).Should(ContainElement("another pending spec"))
|
||||
Ω(summaries[2].State).Should(Equal(types.SpecStatePending))
|
||||
Ω(summaries[3].ComponentTexts).Should(ContainElement("failed spec"))
|
||||
Ω(summaries[3].State).Should(Equal(types.SpecStatePassed))
|
||||
Ω(summaries[4].ComponentTexts).Should(ContainElement("spec B"))
|
||||
Ω(summaries[4].State).Should(Equal(types.SpecStatePassed))
|
||||
Ω(summaries[5].ComponentTexts).Should(ContainElement("skipped spec"))
|
||||
Ω(summaries[5].State).Should(Equal(types.SpecStateSkipped))
|
||||
})
|
||||
|
||||
It("should report the end of the suite", func() {
|
||||
Ω(reporter1.EndSummary.SuiteDescription).Should(Equal("description"))
|
||||
Ω(reporter1.EndSummary.SuiteSucceeded).Should(BeTrue())
|
||||
Ω(reporter1.EndSummary.SuiteID).Should(MatchRegexp("[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}"))
|
||||
Ω(reporter1.EndSummary.NumberOfSpecsBeforeParallelization).Should(Equal(6))
|
||||
Ω(reporter1.EndSummary.NumberOfTotalSpecs).Should(Equal(6))
|
||||
Ω(reporter1.EndSummary.NumberOfSpecsThatWillBeRun).Should(Equal(3))
|
||||
Ω(reporter1.EndSummary.NumberOfPendingSpecs).Should(Equal(2))
|
||||
Ω(reporter1.EndSummary.NumberOfSkippedSpecs).Should(Equal(1))
|
||||
Ω(reporter1.EndSummary.NumberOfPassedSpecs).Should(Equal(0))
|
||||
Ω(reporter1.EndSummary.NumberOfFailedSpecs).Should(Equal(0))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("reporting on specs", func() {
|
||||
var proceed chan bool
|
||||
var ready chan bool
|
||||
var finished chan bool
|
||||
BeforeEach(func() {
|
||||
ready = make(chan bool)
|
||||
proceed = make(chan bool)
|
||||
finished = make(chan bool)
|
||||
skippedSpec := newSpec("SKIP", noneFlag, false)
|
||||
skippedSpec.Skip()
|
||||
|
||||
runner = newRunner(
|
||||
config.GinkgoConfigType{},
|
||||
newBefSuite("BefSuite", false),
|
||||
newAftSuite("AftSuite", false),
|
||||
skippedSpec,
|
||||
newSpec("PENDING", pendingFlag, false),
|
||||
newSpecWithBody("RUN", func() {
|
||||
close(ready)
|
||||
<-proceed
|
||||
}),
|
||||
)
|
||||
go func() {
|
||||
runner.Run()
|
||||
close(finished)
|
||||
}()
|
||||
})
|
||||
|
||||
It("should report about pending/skipped specs", func() {
|
||||
<-ready
|
||||
Ω(reporter1.SpecWillRunSummaries).Should(HaveLen(3))
|
||||
|
||||
Ω(reporter1.SpecWillRunSummaries[0].ComponentTexts[0]).Should(Equal("SKIP"))
|
||||
Ω(reporter1.SpecWillRunSummaries[1].ComponentTexts[0]).Should(Equal("PENDING"))
|
||||
Ω(reporter1.SpecWillRunSummaries[2].ComponentTexts[0]).Should(Equal("RUN"))
|
||||
|
||||
Ω(reporter1.SpecSummaries[0].ComponentTexts[0]).Should(Equal("SKIP"))
|
||||
Ω(reporter1.SpecSummaries[1].ComponentTexts[0]).Should(Equal("PENDING"))
|
||||
Ω(reporter1.SpecSummaries).Should(HaveLen(2))
|
||||
|
||||
close(proceed)
|
||||
<-finished
|
||||
|
||||
Ω(reporter1.SpecSummaries).Should(HaveLen(3))
|
||||
Ω(reporter1.SpecSummaries[2].ComponentTexts[0]).Should(Equal("RUN"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Running BeforeSuite & AfterSuite", func() {
|
||||
var success bool
|
||||
var befSuite leafnodes.SuiteNode
|
||||
var aftSuite leafnodes.SuiteNode
|
||||
Context("with a nil BeforeSuite & AfterSuite", func() {
|
||||
BeforeEach(func() {
|
||||
runner = newRunner(
|
||||
config.GinkgoConfigType{},
|
||||
nil,
|
||||
nil,
|
||||
newSpec("A", noneFlag, false),
|
||||
newSpec("B", noneFlag, false),
|
||||
)
|
||||
success = runner.Run()
|
||||
})
|
||||
|
||||
It("should not report about the BeforeSuite", func() {
|
||||
Ω(reporter1.BeforeSuiteSummary).Should(BeNil())
|
||||
})
|
||||
|
||||
It("should not report about the AfterSuite", func() {
|
||||
Ω(reporter1.AfterSuiteSummary).Should(BeNil())
|
||||
})
|
||||
|
||||
It("should run the specs", func() {
|
||||
Ω(thingsThatRan).Should(Equal([]string{"A", "B"}))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the BeforeSuite & AfterSuite pass", func() {
|
||||
BeforeEach(func() {
|
||||
befSuite = newBefSuite("BefSuite", false)
|
||||
aftSuite = newBefSuite("AftSuite", false)
|
||||
runner = newRunner(
|
||||
config.GinkgoConfigType{},
|
||||
befSuite,
|
||||
aftSuite,
|
||||
newSpec("A", noneFlag, false),
|
||||
newSpec("B", noneFlag, false),
|
||||
)
|
||||
success = runner.Run()
|
||||
})
|
||||
|
||||
It("should run the BeforeSuite, the AfterSuite and the specs", func() {
|
||||
Ω(thingsThatRan).Should(Equal([]string{"BefSuite", "A", "B", "AftSuite"}))
|
||||
})
|
||||
|
||||
It("should report about the BeforeSuite", func() {
|
||||
Ω(reporter1.BeforeSuiteSummary).Should(Equal(befSuite.Summary()))
|
||||
})
|
||||
|
||||
It("should report about the AfterSuite", func() {
|
||||
Ω(reporter1.AfterSuiteSummary).Should(Equal(aftSuite.Summary()))
|
||||
})
|
||||
|
||||
It("should report success", func() {
|
||||
Ω(success).Should(BeTrue())
|
||||
Ω(reporter1.EndSummary.SuiteSucceeded).Should(BeTrue())
|
||||
Ω(reporter1.EndSummary.NumberOfFailedSpecs).Should(Equal(0))
|
||||
})
|
||||
|
||||
It("should not dump the writer", func() {
|
||||
Ω(writer.EventStream).ShouldNot(ContainElement("DUMP"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the BeforeSuite fails", func() {
|
||||
BeforeEach(func() {
|
||||
befSuite = newBefSuite("BefSuite", true)
|
||||
aftSuite = newBefSuite("AftSuite", false)
|
||||
|
||||
skipped := newSpec("Skipped", noneFlag, false)
|
||||
skipped.Skip()
|
||||
|
||||
runner = newRunner(
|
||||
config.GinkgoConfigType{},
|
||||
befSuite,
|
||||
aftSuite,
|
||||
newSpec("A", noneFlag, false),
|
||||
newSpec("B", noneFlag, false),
|
||||
newSpec("Pending", pendingFlag, false),
|
||||
skipped,
|
||||
)
|
||||
success = runner.Run()
|
||||
})
|
||||
|
||||
It("should not run the specs, but it should run the AfterSuite", func() {
|
||||
Ω(thingsThatRan).Should(Equal([]string{"BefSuite", "AftSuite"}))
|
||||
})
|
||||
|
||||
It("should report about the BeforeSuite", func() {
|
||||
Ω(reporter1.BeforeSuiteSummary).Should(Equal(befSuite.Summary()))
|
||||
})
|
||||
|
||||
It("should report about the AfterSuite", func() {
|
||||
Ω(reporter1.AfterSuiteSummary).Should(Equal(aftSuite.Summary()))
|
||||
})
|
||||
|
||||
It("should report failure", func() {
|
||||
Ω(success).Should(BeFalse())
|
||||
Ω(reporter1.EndSummary.SuiteSucceeded).Should(BeFalse())
|
||||
Ω(reporter1.EndSummary.NumberOfFailedSpecs).Should(Equal(2))
|
||||
Ω(reporter1.EndSummary.NumberOfSpecsThatWillBeRun).Should(Equal(2))
|
||||
})
|
||||
|
||||
It("should dump the writer", func() {
|
||||
Ω(writer.EventStream).Should(ContainElement("DUMP"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when some other test fails", func() {
|
||||
BeforeEach(func() {
|
||||
aftSuite = newBefSuite("AftSuite", false)
|
||||
|
||||
runner = newRunner(
|
||||
config.GinkgoConfigType{},
|
||||
nil,
|
||||
aftSuite,
|
||||
newSpec("A", noneFlag, true),
|
||||
)
|
||||
success = runner.Run()
|
||||
})
|
||||
|
||||
It("should still run the AfterSuite", func() {
|
||||
Ω(thingsThatRan).Should(Equal([]string{"A", "AftSuite"}))
|
||||
})
|
||||
|
||||
It("should report about the AfterSuite", func() {
|
||||
Ω(reporter1.AfterSuiteSummary).Should(Equal(aftSuite.Summary()))
|
||||
})
|
||||
|
||||
It("should report failure", func() {
|
||||
Ω(success).Should(BeFalse())
|
||||
Ω(reporter1.EndSummary.SuiteSucceeded).Should(BeFalse())
|
||||
Ω(reporter1.EndSummary.NumberOfFailedSpecs).Should(Equal(1))
|
||||
Ω(reporter1.EndSummary.NumberOfSpecsThatWillBeRun).Should(Equal(1))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the AfterSuite fails", func() {
|
||||
BeforeEach(func() {
|
||||
befSuite = newBefSuite("BefSuite", false)
|
||||
aftSuite = newBefSuite("AftSuite", true)
|
||||
runner = newRunner(
|
||||
config.GinkgoConfigType{},
|
||||
befSuite,
|
||||
aftSuite,
|
||||
newSpec("A", noneFlag, false),
|
||||
newSpec("B", noneFlag, false),
|
||||
)
|
||||
success = runner.Run()
|
||||
})
|
||||
|
||||
It("should run everything", func() {
|
||||
Ω(thingsThatRan).Should(Equal([]string{"BefSuite", "A", "B", "AftSuite"}))
|
||||
})
|
||||
|
||||
It("should report about the BeforeSuite", func() {
|
||||
Ω(reporter1.BeforeSuiteSummary).Should(Equal(befSuite.Summary()))
|
||||
})
|
||||
|
||||
It("should report about the AfterSuite", func() {
|
||||
Ω(reporter1.AfterSuiteSummary).Should(Equal(aftSuite.Summary()))
|
||||
})
|
||||
|
||||
It("should report failure", func() {
|
||||
Ω(success).Should(BeFalse())
|
||||
Ω(reporter1.EndSummary.SuiteSucceeded).Should(BeFalse())
|
||||
Ω(reporter1.EndSummary.NumberOfFailedSpecs).Should(Equal(0))
|
||||
})
|
||||
|
||||
It("should dump the writer", func() {
|
||||
Ω(writer.EventStream).Should(ContainElement("DUMP"))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("When instructed to fail fast", func() {
|
||||
BeforeEach(func() {
|
||||
conf := config.GinkgoConfigType{
|
||||
FailFast: true,
|
||||
}
|
||||
runner = newRunner(conf, nil, newAftSuite("after-suite", false), newSpec("passing", noneFlag, false), newSpec("failing", noneFlag, true), newSpec("dont-see", noneFlag, true), newSpec("dont-see", noneFlag, true))
|
||||
})
|
||||
|
||||
It("should return false, report failure, and not run anything past the failing test", func() {
|
||||
Ω(runner.Run()).Should(BeFalse())
|
||||
Ω(reporter1.EndSummary.SuiteSucceeded).Should(BeFalse())
|
||||
Ω(thingsThatRan).Should(Equal([]string{"passing", "failing", "after-suite"}))
|
||||
})
|
||||
|
||||
It("should announce the subsequent specs as skipped", func() {
|
||||
runner.Run()
|
||||
Ω(reporter1.SpecSummaries).Should(HaveLen(4))
|
||||
Ω(reporter1.SpecSummaries[2].State).Should(Equal(types.SpecStateSkipped))
|
||||
Ω(reporter1.SpecSummaries[3].State).Should(Equal(types.SpecStateSkipped))
|
||||
})
|
||||
|
||||
It("should mark all subsequent specs as skipped", func() {
|
||||
runner.Run()
|
||||
Ω(reporter1.EndSummary.NumberOfSkippedSpecs).Should(Equal(2))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Marking failure and success", func() {
|
||||
Context("when all tests pass", func() {
|
||||
BeforeEach(func() {
|
||||
runner = newRunner(config.GinkgoConfigType{}, nil, nil, newSpec("passing", noneFlag, false), newSpec("pending", pendingFlag, false))
|
||||
})
|
||||
|
||||
It("should return true and report success", func() {
|
||||
Ω(runner.Run()).Should(BeTrue())
|
||||
Ω(reporter1.EndSummary.SuiteSucceeded).Should(BeTrue())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when a test fails", func() {
|
||||
BeforeEach(func() {
|
||||
runner = newRunner(config.GinkgoConfigType{}, nil, nil, newSpec("failing", noneFlag, true), newSpec("pending", pendingFlag, false))
|
||||
})
|
||||
|
||||
It("should return false and report failure", func() {
|
||||
Ω(runner.Run()).Should(BeFalse())
|
||||
Ω(reporter1.EndSummary.SuiteSucceeded).Should(BeFalse())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when there is a pending test, but pendings count as failures", func() {
|
||||
BeforeEach(func() {
|
||||
runner = newRunner(config.GinkgoConfigType{FailOnPending: true}, nil, nil, newSpec("passing", noneFlag, false), newSpec("pending", pendingFlag, false))
|
||||
})
|
||||
|
||||
It("should return false and report failure", func() {
|
||||
Ω(runner.Run()).Should(BeFalse())
|
||||
Ω(reporter1.EndSummary.SuiteSucceeded).Should(BeFalse())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Managing the writer", func() {
|
||||
BeforeEach(func() {
|
||||
runner = newRunner(
|
||||
config.GinkgoConfigType{},
|
||||
nil,
|
||||
nil,
|
||||
newSpec("A", noneFlag, false),
|
||||
newSpec("B", noneFlag, true),
|
||||
newSpec("C", noneFlag, false),
|
||||
)
|
||||
reporter1.SpecWillRunStub = func(specSummary *types.SpecSummary) {
|
||||
writer.AddEvent("R1.WillRun")
|
||||
}
|
||||
reporter2.SpecWillRunStub = func(specSummary *types.SpecSummary) {
|
||||
writer.AddEvent("R2.WillRun")
|
||||
}
|
||||
reporter1.SpecDidCompleteStub = func(specSummary *types.SpecSummary) {
|
||||
writer.AddEvent("R1.DidComplete")
|
||||
}
|
||||
reporter2.SpecDidCompleteStub = func(specSummary *types.SpecSummary) {
|
||||
writer.AddEvent("R2.DidComplete")
|
||||
}
|
||||
runner.Run()
|
||||
})
|
||||
|
||||
It("should truncate between tests, but only dump if a test fails", func() {
|
||||
Ω(writer.EventStream).Should(Equal([]string{
|
||||
"TRUNCATE",
|
||||
"R1.WillRun",
|
||||
"R2.WillRun",
|
||||
"A",
|
||||
"R2.DidComplete",
|
||||
"R1.DidComplete",
|
||||
"TRUNCATE",
|
||||
"R1.WillRun",
|
||||
"R2.WillRun",
|
||||
"B",
|
||||
"R2.DidComplete",
|
||||
"DUMP",
|
||||
"R1.DidComplete",
|
||||
"TRUNCATE",
|
||||
"R1.WillRun",
|
||||
"R2.WillRun",
|
||||
"C",
|
||||
"R2.DidComplete",
|
||||
"R1.DidComplete",
|
||||
}))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("CurrentSpecSummary", func() {
|
||||
It("should return the spec summary for the currently running spec", func() {
|
||||
var summary *types.SpecSummary
|
||||
runner = newRunner(
|
||||
config.GinkgoConfigType{},
|
||||
nil,
|
||||
nil,
|
||||
newSpec("A", noneFlag, false),
|
||||
newSpecWithBody("B", func() {
|
||||
var ok bool
|
||||
summary, ok = runner.CurrentSpecSummary()
|
||||
Ω(ok).Should(BeTrue())
|
||||
}),
|
||||
newSpec("C", noneFlag, false),
|
||||
)
|
||||
runner.Run()
|
||||
|
||||
Ω(summary.ComponentTexts).Should(Equal([]string{"B"}))
|
||||
|
||||
summary, ok := runner.CurrentSpecSummary()
|
||||
Ω(summary).Should(BeNil())
|
||||
Ω(ok).Should(BeFalse())
|
||||
})
|
||||
})
|
||||
|
||||
Context("When running tests in parallel", func() {
|
||||
It("reports the correct number of specs before parallelization", func() {
|
||||
specs := spec.NewSpecs([]*spec.Spec{
|
||||
newSpec("A", noneFlag, false),
|
||||
newSpec("B", pendingFlag, false),
|
||||
newSpec("C", noneFlag, false),
|
||||
})
|
||||
specs.TrimForParallelization(2, 1)
|
||||
runner = New("description", nil, specs, nil, []reporters.Reporter{reporter1, reporter2}, writer, config.GinkgoConfigType{})
|
||||
runner.Run()
|
||||
|
||||
Ω(reporter1.EndSummary.NumberOfSpecsBeforeParallelization).Should(Equal(3))
|
||||
Ω(reporter1.EndSummary.NumberOfTotalSpecs).Should(Equal(2))
|
||||
Ω(reporter1.EndSummary.NumberOfSpecsThatWillBeRun).Should(Equal(1))
|
||||
Ω(reporter1.EndSummary.NumberOfPendingSpecs).Should(Equal(1))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("generating a suite id", func() {
|
||||
It("should generate an id randomly", func() {
|
||||
runnerA := newRunner(config.GinkgoConfigType{}, nil, nil)
|
||||
runnerA.Run()
|
||||
IDA := reporter1.BeginSummary.SuiteID
|
||||
|
||||
runnerB := newRunner(config.GinkgoConfigType{}, nil, nil)
|
||||
runnerB.Run()
|
||||
IDB := reporter1.BeginSummary.SuiteID
|
||||
|
||||
IDRegexp := "[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}"
|
||||
Ω(IDA).Should(MatchRegexp(IDRegexp))
|
||||
Ω(IDB).Should(MatchRegexp(IDRegexp))
|
||||
|
||||
Ω(IDA).ShouldNot(Equal(IDB))
|
||||
})
|
||||
})
|
||||
})
|
||||
35 Godeps/_workspace/src/github.com/onsi/ginkgo/internal/suite/suite_suite_test.go (generated, vendored)
@@ -1,35 +0,0 @@
|
||||
package suite_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
|
||||
"testing"
|
||||
)
|
||||
|
||||
func Test(t *testing.T) {
|
||||
RegisterFailHandler(Fail)
|
||||
RunSpecs(t, "Suite")
|
||||
}
|
||||
|
||||
var numBeforeSuiteRuns = 0
|
||||
var numAfterSuiteRuns = 0
|
||||
|
||||
var _ = BeforeSuite(func() {
|
||||
numBeforeSuiteRuns++
|
||||
})
|
||||
|
||||
var _ = AfterSuite(func() {
|
||||
numAfterSuiteRuns++
|
||||
Ω(numBeforeSuiteRuns).Should(Equal(1))
|
||||
Ω(numAfterSuiteRuns).Should(Equal(1))
|
||||
})
|
||||
|
||||
//Fakes
|
||||
type fakeTestingT struct {
|
||||
didFail bool
|
||||
}
|
||||
|
||||
func (fakeT *fakeTestingT) Fail() {
|
||||
fakeT.didFail = true
|
||||
}
|
||||
398 Godeps/_workspace/src/github.com/onsi/ginkgo/internal/suite/suite_test.go (generated, vendored)
@@ -1,398 +0,0 @@
|
||||
package suite_test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/ginkgo/internal/suite"
|
||||
. "github.com/onsi/gomega"
|
||||
|
||||
"math/rand"
|
||||
"time"
|
||||
|
||||
"github.com/onsi/ginkgo/config"
|
||||
"github.com/onsi/ginkgo/internal/codelocation"
|
||||
Failer "github.com/onsi/ginkgo/internal/failer"
|
||||
Writer "github.com/onsi/ginkgo/internal/writer"
|
||||
"github.com/onsi/ginkgo/reporters"
|
||||
"github.com/onsi/ginkgo/types"
|
||||
)
|
||||
|
||||
var _ = Describe("Suite", func() {
|
||||
var (
|
||||
specSuite *Suite
|
||||
fakeT *fakeTestingT
|
||||
fakeR *reporters.FakeReporter
|
||||
writer *Writer.FakeGinkgoWriter
|
||||
failer *Failer.Failer
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
writer = Writer.NewFake()
|
||||
fakeT = &fakeTestingT{}
|
||||
fakeR = reporters.NewFakeReporter()
|
||||
failer = Failer.New()
|
||||
specSuite = New(failer)
|
||||
})
|
||||
|
||||
Describe("running a suite", func() {
|
||||
var (
|
||||
runOrder []string
|
||||
randomizeAllSpecs bool
|
||||
randomSeed int64
|
||||
focusString string
|
||||
parallelNode int
|
||||
parallelTotal int
|
||||
runResult bool
|
||||
hasProgrammaticFocus bool
|
||||
)
|
||||
|
||||
var f = func(runText string) func() {
|
||||
return func() {
|
||||
runOrder = append(runOrder, runText)
|
||||
}
|
||||
}
|
||||
|
||||
BeforeEach(func() {
|
||||
randomizeAllSpecs = false
|
||||
randomSeed = 11
|
||||
parallelNode = 1
|
||||
parallelTotal = 1
|
||||
focusString = ""
|
||||
|
||||
runOrder = make([]string, 0)
|
||||
specSuite.SetBeforeSuiteNode(f("BeforeSuite"), codelocation.New(0), 0)
|
||||
specSuite.PushBeforeEachNode(f("top BE"), codelocation.New(0), 0)
|
||||
specSuite.PushJustBeforeEachNode(f("top JBE"), codelocation.New(0), 0)
|
||||
specSuite.PushAfterEachNode(f("top AE"), codelocation.New(0), 0)
|
||||
|
||||
specSuite.PushContainerNode("container", func() {
|
||||
specSuite.PushBeforeEachNode(f("BE"), codelocation.New(0), 0)
|
||||
specSuite.PushJustBeforeEachNode(f("JBE"), codelocation.New(0), 0)
|
||||
specSuite.PushAfterEachNode(f("AE"), codelocation.New(0), 0)
|
||||
specSuite.PushItNode("it", f("IT"), types.FlagTypeNone, codelocation.New(0), 0)
|
||||
|
||||
specSuite.PushContainerNode("inner container", func() {
|
||||
specSuite.PushItNode("inner it", f("inner IT"), types.FlagTypeNone, codelocation.New(0), 0)
|
||||
}, types.FlagTypeNone, codelocation.New(0))
|
||||
}, types.FlagTypeNone, codelocation.New(0))
|
||||
|
||||
specSuite.PushContainerNode("container 2", func() {
|
||||
specSuite.PushBeforeEachNode(f("BE 2"), codelocation.New(0), 0)
|
||||
specSuite.PushItNode("it 2", f("IT 2"), types.FlagTypeNone, codelocation.New(0), 0)
|
||||
}, types.FlagTypeNone, codelocation.New(0))
|
||||
|
||||
specSuite.PushItNode("top level it", f("top IT"), types.FlagTypeNone, codelocation.New(0), 0)
|
||||
|
||||
specSuite.SetAfterSuiteNode(f("AfterSuite"), codelocation.New(0), 0)
|
||||
})
|
||||
|
||||
JustBeforeEach(func() {
|
||||
runResult, hasProgrammaticFocus = specSuite.Run(fakeT, "suite description", []reporters.Reporter{fakeR}, writer, config.GinkgoConfigType{
|
||||
RandomSeed: randomSeed,
|
||||
RandomizeAllSpecs: randomizeAllSpecs,
|
||||
FocusString: focusString,
|
||||
ParallelNode: parallelNode,
|
||||
ParallelTotal: parallelTotal,
|
||||
})
|
||||
})
|
||||
|
||||
It("provides the config and suite description to the reporter", func() {
|
||||
Ω(fakeR.Config.RandomSeed).Should(Equal(int64(randomSeed)))
|
||||
Ω(fakeR.Config.RandomizeAllSpecs).Should(Equal(randomizeAllSpecs))
|
||||
Ω(fakeR.BeginSummary.SuiteDescription).Should(Equal("suite description"))
|
||||
})
|
||||
|
||||
It("reports that the BeforeSuite node ran", func() {
|
||||
Ω(fakeR.BeforeSuiteSummary).ShouldNot(BeNil())
|
||||
})
|
||||
|
||||
It("reports that the AfterSuite node ran", func() {
|
||||
Ω(fakeR.AfterSuiteSummary).ShouldNot(BeNil())
|
||||
})
|
||||
|
||||
It("provides information about the current test", func() {
|
||||
description := CurrentGinkgoTestDescription()
|
||||
Ω(description.ComponentTexts).Should(Equal([]string{"Suite", "running a suite", "provides information about the current test"}))
|
||||
Ω(description.FullTestText).Should(Equal("Suite running a suite provides information about the current test"))
|
||||
Ω(description.TestText).Should(Equal("provides information about the current test"))
|
||||
Ω(description.IsMeasurement).Should(BeFalse())
|
||||
Ω(description.FileName).Should(ContainSubstring("suite_test.go"))
|
||||
Ω(description.LineNumber).Should(BeNumerically(">", 50))
|
||||
Ω(description.LineNumber).Should(BeNumerically("<", 150))
|
||||
})
|
||||
|
||||
Measure("should run measurements", func(b Benchmarker) {
|
||||
r := rand.New(rand.NewSource(time.Now().UnixNano()))
|
||||
|
||||
runtime := b.Time("sleeping", func() {
|
||||
sleepTime := time.Duration(r.Float64() * 0.01 * float64(time.Second))
|
||||
time.Sleep(sleepTime)
|
||||
})
|
||||
Ω(runtime.Seconds()).Should(BeNumerically("<=", 0.015))
|
||||
Ω(runtime.Seconds()).Should(BeNumerically(">=", 0))
|
||||
|
||||
randomValue := r.Float64() * 10.0
|
||||
b.RecordValue("random value", randomValue)
|
||||
Ω(randomValue).Should(BeNumerically("<=", 10.0))
|
||||
Ω(randomValue).Should(BeNumerically(">=", 0.0))
|
||||
}, 10)
|
||||
|
||||
It("creates a node hierarchy, converts it to a spec collection, and runs it", func() {
|
||||
Ω(runOrder).Should(Equal([]string{
|
||||
"BeforeSuite",
|
||||
"top BE", "BE", "top JBE", "JBE", "IT", "AE", "top AE",
|
||||
"top BE", "BE", "top JBE", "JBE", "inner IT", "AE", "top AE",
|
||||
"top BE", "BE 2", "top JBE", "IT 2", "top AE",
|
||||
"top BE", "top JBE", "top IT", "top AE",
|
||||
"AfterSuite",
|
||||
}))
|
||||
})
|
||||
|
||||
Context("when told to randomize all specs", func() {
|
||||
BeforeEach(func() {
|
||||
randomizeAllSpecs = true
|
||||
})
|
||||
|
||||
It("does", func() {
|
||||
Ω(runOrder).Should(Equal([]string{
|
||||
"BeforeSuite",
|
||||
"top BE", "top JBE", "top IT", "top AE",
|
||||
"top BE", "BE", "top JBE", "JBE", "inner IT", "AE", "top AE",
|
||||
"top BE", "BE", "top JBE", "JBE", "IT", "AE", "top AE",
|
||||
"top BE", "BE 2", "top JBE", "IT 2", "top AE",
|
||||
"AfterSuite",
|
||||
}))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("with ginkgo.parallel.total > 1", func() {
|
||||
BeforeEach(func() {
|
||||
parallelTotal = 2
|
||||
randomizeAllSpecs = true
|
||||
})
|
||||
|
||||
Context("for one worker", func() {
|
||||
BeforeEach(func() {
|
||||
parallelNode = 1
|
||||
})
|
||||
|
||||
It("should run a subset of tests", func() {
|
||||
Ω(runOrder).Should(Equal([]string{
|
||||
"BeforeSuite",
|
||||
"top BE", "top JBE", "top IT", "top AE",
|
||||
"top BE", "BE", "top JBE", "JBE", "inner IT", "AE", "top AE",
|
||||
"AfterSuite",
|
||||
}))
|
||||
})
|
||||
})
|
||||
|
||||
Context("for another worker", func() {
|
||||
BeforeEach(func() {
|
||||
parallelNode = 2
|
||||
})
|
||||
|
||||
It("should run a (different) subset of tests", func() {
|
||||
Ω(runOrder).Should(Equal([]string{
|
||||
"BeforeSuite",
|
||||
"top BE", "BE", "top JBE", "JBE", "IT", "AE", "top AE",
|
||||
"top BE", "BE 2", "top JBE", "IT 2", "top AE",
|
||||
"AfterSuite",
|
||||
}))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Context("when provided with a filter", func() {
|
||||
BeforeEach(func() {
|
||||
focusString = `inner|\d`
|
||||
})
|
||||
|
||||
It("converts the filter to a regular expression and uses it to filter the running specs", func() {
|
||||
Ω(runOrder).Should(Equal([]string{
|
||||
"BeforeSuite",
|
||||
"top BE", "BE", "top JBE", "JBE", "inner IT", "AE", "top AE",
|
||||
"top BE", "BE 2", "top JBE", "IT 2", "top AE",
|
||||
"AfterSuite",
|
||||
}))
|
||||
})
|
||||
|
||||
It("should not report a programmatic focus", func() {
|
||||
Ω(hasProgrammaticFocus).Should(BeFalse())
|
||||
})
|
||||
})
|
||||
|
||||
Context("with a programatically focused spec", func() {
|
||||
BeforeEach(func() {
|
||||
specSuite.PushItNode("focused it", f("focused it"), types.FlagTypeFocused, codelocation.New(0), 0)
|
||||
|
||||
specSuite.PushContainerNode("focused container", func() {
|
||||
specSuite.PushItNode("inner focused it", f("inner focused it"), types.FlagTypeFocused, codelocation.New(0), 0)
|
||||
specSuite.PushItNode("inner unfocused it", f("inner unfocused it"), types.FlagTypeNone, codelocation.New(0), 0)
|
||||
}, types.FlagTypeFocused, codelocation.New(0))
|
||||
|
||||
})
|
||||
|
||||
It("should only run the focused test, applying backpropagation to favor most deeply focused leaf nodes", func() {
|
||||
Ω(runOrder).Should(Equal([]string{
|
||||
"BeforeSuite",
|
||||
"top BE", "top JBE", "focused it", "top AE",
|
||||
"top BE", "top JBE", "inner focused it", "top AE",
|
||||
"AfterSuite",
|
||||
}))
|
||||
})
|
||||
|
||||
It("should report a programmatic focus", func() {
|
||||
Ω(hasProgrammaticFocus).Should(BeTrue())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the specs pass", func() {
|
||||
It("doesn't report a failure", func() {
|
||||
Ω(fakeT.didFail).Should(BeFalse())
|
||||
})
|
||||
|
||||
It("should return true", func() {
|
||||
Ω(runResult).Should(BeTrue())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when a spec fails", func() {
|
||||
var location types.CodeLocation
|
||||
BeforeEach(func() {
|
||||
specSuite.PushItNode("top level it", func() {
|
||||
location = codelocation.New(0)
|
||||
failer.Fail("oops!", location)
|
||||
}, types.FlagTypeNone, codelocation.New(0), 0)
|
||||
})
|
||||
|
||||
It("should return false", func() {
|
||||
Ω(runResult).Should(BeFalse())
|
||||
})
|
||||
|
||||
It("reports a failure", func() {
|
||||
Ω(fakeT.didFail).Should(BeTrue())
|
||||
})
|
||||
|
||||
It("generates the correct failure data", func() {
|
||||
Ω(fakeR.SpecSummaries[0].Failure.Message).Should(Equal("oops!"))
|
||||
Ω(fakeR.SpecSummaries[0].Failure.Location).Should(Equal(location))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when runnable nodes are nested within other runnable nodes", func() {
|
||||
Context("when an It is nested", func() {
|
||||
BeforeEach(func() {
|
||||
specSuite.PushItNode("top level it", func() {
|
||||
specSuite.PushItNode("nested it", f("oops"), types.FlagTypeNone, codelocation.New(0), 0)
|
||||
}, types.FlagTypeNone, codelocation.New(0), 0)
|
||||
})
|
||||
|
||||
It("should fail", func() {
|
||||
Ω(fakeT.didFail).Should(BeTrue())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when a Measure is nested", func() {
|
||||
BeforeEach(func() {
|
||||
specSuite.PushItNode("top level it", func() {
|
||||
specSuite.PushMeasureNode("nested measure", func(Benchmarker) {}, types.FlagTypeNone, codelocation.New(0), 10)
|
||||
}, types.FlagTypeNone, codelocation.New(0), 0)
|
||||
})
|
||||
|
||||
It("should fail", func() {
|
||||
Ω(fakeT.didFail).Should(BeTrue())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when a BeforeEach is nested", func() {
|
||||
BeforeEach(func() {
|
||||
specSuite.PushItNode("top level it", func() {
|
||||
specSuite.PushBeforeEachNode(f("nested bef"), codelocation.New(0), 0)
|
||||
}, types.FlagTypeNone, codelocation.New(0), 0)
|
||||
})
|
||||
|
||||
It("should fail", func() {
|
||||
Ω(fakeT.didFail).Should(BeTrue())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when a JustBeforeEach is nested", func() {
|
||||
BeforeEach(func() {
|
||||
specSuite.PushItNode("top level it", func() {
|
||||
specSuite.PushJustBeforeEachNode(f("nested jbef"), codelocation.New(0), 0)
|
||||
}, types.FlagTypeNone, codelocation.New(0), 0)
|
||||
})
|
||||
|
||||
It("should fail", func() {
|
||||
Ω(fakeT.didFail).Should(BeTrue())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when a AfterEach is nested", func() {
|
||||
BeforeEach(func() {
|
||||
specSuite.PushItNode("top level it", func() {
|
||||
specSuite.PushAfterEachNode(f("nested aft"), codelocation.New(0), 0)
|
||||
}, types.FlagTypeNone, codelocation.New(0), 0)
|
||||
})
|
||||
|
||||
It("should fail", func() {
|
||||
Ω(fakeT.didFail).Should(BeTrue())
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("BeforeSuite", func() {
|
||||
Context("when setting BeforeSuite more than once", func() {
|
||||
It("should panic", func() {
|
||||
specSuite.SetBeforeSuiteNode(func() {}, codelocation.New(0), 0)
|
||||
|
||||
Ω(func() {
|
||||
specSuite.SetBeforeSuiteNode(func() {}, codelocation.New(0), 0)
|
||||
}).Should(Panic())
|
||||
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("AfterSuite", func() {
|
||||
Context("when setting AfterSuite more than once", func() {
|
||||
It("should panic", func() {
|
||||
specSuite.SetAfterSuiteNode(func() {}, codelocation.New(0), 0)
|
||||
|
||||
Ω(func() {
|
||||
specSuite.SetAfterSuiteNode(func() {}, codelocation.New(0), 0)
|
||||
}).Should(Panic())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("By", func() {
|
||||
It("writes to the GinkgoWriter", func() {
|
||||
originalGinkgoWriter := GinkgoWriter
|
||||
buffer := &bytes.Buffer{}
|
||||
|
||||
GinkgoWriter = buffer
|
||||
By("Saying Hello GinkgoWriter")
|
||||
GinkgoWriter = originalGinkgoWriter
|
||||
|
||||
Ω(buffer.String()).Should(ContainSubstring("STEP"))
|
||||
Ω(buffer.String()).Should(ContainSubstring(": Saying Hello GinkgoWriter\n"))
|
||||
})
|
||||
|
||||
It("calls the passed-in callback if present", func() {
|
||||
a := 0
|
||||
By("calling the callback", func() {
|
||||
a = 1
|
||||
})
|
||||
Ω(a).Should(Equal(1))
|
||||
})
|
||||
|
||||
It("panics if there is more than one callback", func() {
|
||||
Ω(func() {
|
||||
By("registering more than one callback", func() {}, func() {})
|
||||
}).Should(Panic())
|
||||
})
|
||||
})
|
||||
})
|
||||
13 Godeps/_workspace/src/github.com/onsi/ginkgo/internal/writer/writer_suite_test.go (generated, vendored)
@@ -1,13 +0,0 @@
package writer_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"

	"testing"
)

func TestWriter(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Writer Suite")
}
75 Godeps/_workspace/src/github.com/onsi/ginkgo/internal/writer/writer_test.go (generated, vendored)
@@ -1,75 +0,0 @@
|
||||
package writer_test
|
||||
|
||||
import (
|
||||
"github.com/onsi/gomega/gbytes"
|
||||
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/ginkgo/internal/writer"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("Writer", func() {
|
||||
var writer *Writer
|
||||
var out *gbytes.Buffer
|
||||
|
||||
BeforeEach(func() {
|
||||
out = gbytes.NewBuffer()
|
||||
writer = New(out)
|
||||
})
|
||||
|
||||
It("should stream directly to the outbuffer by default", func() {
|
||||
writer.Write([]byte("foo"))
|
||||
Ω(out).Should(gbytes.Say("foo"))
|
||||
})
|
||||
|
||||
It("should not emit the header when asked to DumpOutWitHeader", func() {
|
||||
writer.Write([]byte("foo"))
|
||||
writer.DumpOutWithHeader("my header")
|
||||
Ω(out).ShouldNot(gbytes.Say("my header"))
|
||||
Ω(out).Should(gbytes.Say("foo"))
|
||||
})
|
||||
|
||||
Context("when told not to stream", func() {
|
||||
BeforeEach(func() {
|
||||
writer.SetStream(false)
|
||||
})
|
||||
|
||||
It("should only write to the buffer when told to DumpOut", func() {
|
||||
writer.Write([]byte("foo"))
|
||||
Ω(out).ShouldNot(gbytes.Say("foo"))
|
||||
writer.DumpOut()
|
||||
Ω(out).Should(gbytes.Say("foo"))
|
||||
})
|
||||
|
||||
It("should truncate the internal buffer when told to truncate", func() {
|
||||
writer.Write([]byte("foo"))
|
||||
writer.Truncate()
|
||||
writer.DumpOut()
|
||||
Ω(out).ShouldNot(gbytes.Say("foo"))
|
||||
|
||||
writer.Write([]byte("bar"))
|
||||
writer.DumpOut()
|
||||
Ω(out).Should(gbytes.Say("bar"))
|
||||
})
|
||||
|
||||
Describe("emitting a header", func() {
|
||||
Context("when the buffer has content", func() {
|
||||
It("should emit the header followed by the content", func() {
|
||||
writer.Write([]byte("foo"))
|
||||
writer.DumpOutWithHeader("my header")
|
||||
|
||||
Ω(out).Should(gbytes.Say("my header"))
|
||||
Ω(out).Should(gbytes.Say("foo"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the buffer has no content", func() {
|
||||
It("should not emit the header", func() {
|
||||
writer.DumpOutWithHeader("my header")
|
||||
|
||||
Ω(out).ShouldNot(gbytes.Say("my header"))
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
397 Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/default_reporter_test.go (generated, vendored)
@@ -1,397 +0,0 @@
|
||||
package reporters_test
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
. "github.com/onsi/ginkgo"
|
||||
"github.com/onsi/ginkgo/config"
|
||||
"github.com/onsi/ginkgo/reporters"
|
||||
st "github.com/onsi/ginkgo/reporters/stenographer"
|
||||
"github.com/onsi/ginkgo/types"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("DefaultReporter", func() {
|
||||
var (
|
||||
reporter *reporters.DefaultReporter
|
||||
reporterConfig config.DefaultReporterConfigType
|
||||
stenographer *st.FakeStenographer
|
||||
|
||||
ginkgoConfig config.GinkgoConfigType
|
||||
suite *types.SuiteSummary
|
||||
spec *types.SpecSummary
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
stenographer = st.NewFakeStenographer()
|
||||
reporterConfig = config.DefaultReporterConfigType{
|
||||
NoColor: false,
|
||||
SlowSpecThreshold: 0.1,
|
||||
NoisyPendings: true,
|
||||
Verbose: true,
|
||||
FullTrace: true,
|
||||
}
|
||||
|
||||
reporter = reporters.NewDefaultReporter(reporterConfig, stenographer)
|
||||
})
|
||||
|
||||
call := func(method string, args ...interface{}) st.FakeStenographerCall {
|
||||
return st.NewFakeStenographerCall(method, args...)
|
||||
}
|
||||
|
||||
Describe("SpecSuiteWillBegin", func() {
|
||||
BeforeEach(func() {
|
||||
suite = &types.SuiteSummary{
|
||||
SuiteDescription: "A Sweet Suite",
|
||||
NumberOfTotalSpecs: 10,
|
||||
NumberOfSpecsThatWillBeRun: 8,
|
||||
}
|
||||
|
||||
ginkgoConfig = config.GinkgoConfigType{
|
||||
RandomSeed: 1138,
|
||||
RandomizeAllSpecs: true,
|
||||
}
|
||||
})
|
||||
|
||||
Context("when a serial (non-parallel) suite begins", func() {
|
||||
BeforeEach(func() {
|
||||
ginkgoConfig.ParallelTotal = 1
|
||||
|
||||
reporter.SpecSuiteWillBegin(ginkgoConfig, suite)
|
||||
})
|
||||
|
||||
It("should announce the suite, then announce the number of specs", func() {
|
||||
Ω(stenographer.Calls()).Should(HaveLen(2))
|
||||
Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSuite", "A Sweet Suite", ginkgoConfig.RandomSeed, true, false)))
|
||||
Ω(stenographer.Calls()[1]).Should(Equal(call("AnnounceNumberOfSpecs", 8, 10, false)))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when a parallel suite begins", func() {
|
||||
BeforeEach(func() {
|
||||
ginkgoConfig.ParallelTotal = 2
|
||||
ginkgoConfig.ParallelNode = 1
|
||||
suite.NumberOfSpecsBeforeParallelization = 20
|
||||
|
||||
reporter.SpecSuiteWillBegin(ginkgoConfig, suite)
|
||||
})
|
||||
|
||||
It("should announce the suite, announce that it's a parallel run, then announce the number of specs", func() {
|
||||
Ω(stenographer.Calls()).Should(HaveLen(3))
|
||||
Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSuite", "A Sweet Suite", ginkgoConfig.RandomSeed, true, false)))
|
||||
Ω(stenographer.Calls()[1]).Should(Equal(call("AnnounceParallelRun", 1, 2, 10, 20, false)))
|
||||
Ω(stenographer.Calls()[2]).Should(Equal(call("AnnounceNumberOfSpecs", 8, 10, false)))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("BeforeSuiteDidRun", func() {
|
||||
Context("when the BeforeSuite passes", func() {
|
||||
It("should announce nothing", func() {
|
||||
reporter.BeforeSuiteDidRun(&types.SetupSummary{
|
||||
State: types.SpecStatePassed,
|
||||
})
|
||||
|
||||
Ω(stenographer.Calls()).Should(BeEmpty())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the BeforeSuite fails", func() {
|
||||
It("should announce the failure", func() {
|
||||
summary := &types.SetupSummary{
|
||||
State: types.SpecStateFailed,
|
||||
}
|
||||
reporter.BeforeSuiteDidRun(summary)
|
||||
|
||||
Ω(stenographer.Calls()).Should(HaveLen(1))
|
||||
Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceBeforeSuiteFailure", summary, false, true)))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("AfterSuiteDidRun", func() {
|
||||
Context("when the AfterSuite passes", func() {
|
||||
It("should announce nothing", func() {
|
||||
reporter.AfterSuiteDidRun(&types.SetupSummary{
|
||||
State: types.SpecStatePassed,
|
||||
})
|
||||
|
||||
Ω(stenographer.Calls()).Should(BeEmpty())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the AfterSuite fails", func() {
|
||||
It("should announce the failure", func() {
|
||||
summary := &types.SetupSummary{
|
||||
State: types.SpecStateFailed,
|
||||
}
|
||||
reporter.AfterSuiteDidRun(summary)
|
||||
|
||||
Ω(stenographer.Calls()).Should(HaveLen(1))
|
||||
Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceAfterSuiteFailure", summary, false, true)))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("SpecWillRun", func() {
|
||||
Context("When running in verbose mode", func() {
|
||||
Context("and the spec will run", func() {
|
||||
BeforeEach(func() {
|
||||
spec = &types.SpecSummary{}
|
||||
reporter.SpecWillRun(spec)
|
||||
})
|
||||
|
||||
It("should announce that the spec will run", func() {
|
||||
Ω(stenographer.Calls()).Should(HaveLen(1))
|
||||
Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSpecWillRun", spec)))
|
||||
})
|
||||
})
|
||||
|
||||
Context("and the spec will not run", func() {
|
||||
Context("because it is pending", func() {
|
||||
BeforeEach(func() {
|
||||
spec = &types.SpecSummary{
|
||||
State: types.SpecStatePending,
|
||||
}
|
||||
reporter.SpecWillRun(spec)
|
||||
})
|
||||
|
||||
It("should announce nothing", func() {
|
||||
Ω(stenographer.Calls()).Should(BeEmpty())
|
||||
})
|
||||
})
|
||||
|
||||
Context("because it is skipped", func() {
|
||||
BeforeEach(func() {
|
||||
spec = &types.SpecSummary{
|
||||
State: types.SpecStateSkipped,
|
||||
}
|
||||
reporter.SpecWillRun(spec)
|
||||
})
|
||||
|
||||
It("should announce nothing", func() {
|
||||
Ω(stenographer.Calls()).Should(BeEmpty())
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Context("When running in verbose & succinct mode", func() {
|
||||
BeforeEach(func() {
|
||||
reporterConfig.Succinct = true
|
||||
reporter = reporters.NewDefaultReporter(reporterConfig, stenographer)
|
||||
spec = &types.SpecSummary{}
|
||||
reporter.SpecWillRun(spec)
|
||||
})
|
||||
|
||||
It("should announce nothing", func() {
|
||||
Ω(stenographer.Calls()).Should(BeEmpty())
|
||||
})
|
||||
})
|
||||
|
||||
Context("When not running in verbose mode", func() {
|
||||
BeforeEach(func() {
|
||||
reporterConfig.Verbose = false
|
||||
reporter = reporters.NewDefaultReporter(reporterConfig, stenographer)
|
||||
spec = &types.SpecSummary{}
|
||||
reporter.SpecWillRun(spec)
|
||||
})
|
||||
|
||||
It("should announce nothing", func() {
|
||||
Ω(stenographer.Calls()).Should(BeEmpty())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("SpecDidComplete", func() {
|
||||
JustBeforeEach(func() {
|
||||
reporter.SpecDidComplete(spec)
|
||||
})
|
||||
|
||||
BeforeEach(func() {
|
||||
spec = &types.SpecSummary{}
|
||||
})
|
||||
|
||||
Context("When the spec passed", func() {
|
||||
BeforeEach(func() {
|
||||
spec.State = types.SpecStatePassed
|
||||
})
|
||||
|
||||
Context("When the spec was a measurement", func() {
|
||||
BeforeEach(func() {
|
||||
spec.IsMeasurement = true
|
||||
})
|
||||
|
||||
It("should announce the measurement", func() {
|
||||
Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSuccesfulMeasurement", spec, false)))
|
||||
})
|
||||
})
|
||||
|
||||
Context("When the spec is slow", func() {
|
||||
BeforeEach(func() {
|
||||
spec.RunTime = time.Second
|
||||
})
|
||||
|
||||
It("should announce that it was slow", func() {
|
||||
Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSuccesfulSlowSpec", spec, false)))
|
||||
})
|
||||
})
|
||||
|
||||
Context("Otherwise", func() {
|
||||
It("should announce the succesful spec", func() {
|
||||
Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSuccesfulSpec", spec)))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Context("When the spec is pending", func() {
|
||||
BeforeEach(func() {
|
||||
spec.State = types.SpecStatePending
|
||||
})
|
||||
|
||||
It("should announce the pending spec", func() {
|
||||
Ω(stenographer.Calls()[0]).Should(Equal(call("AnnouncePendingSpec", spec, true)))
|
||||
})
|
||||
})
|
||||
|
||||
Context("When the spec is skipped", func() {
|
||||
BeforeEach(func() {
|
||||
spec.State = types.SpecStateSkipped
|
||||
})
|
||||
|
||||
It("should announce the skipped spec", func() {
|
||||
Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSkippedSpec", spec)))
|
||||
})
|
||||
})
|
||||
|
||||
Context("When the spec timed out", func() {
|
||||
BeforeEach(func() {
|
||||
spec.State = types.SpecStateTimedOut
|
||||
})
|
||||
|
||||
It("should announce the timedout spec", func() {
|
||||
Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSpecTimedOut", spec, false, true)))
|
||||
})
|
||||
})
|
||||
|
||||
Context("When the spec panicked", func() {
|
||||
BeforeEach(func() {
|
||||
spec.State = types.SpecStatePanicked
|
||||
})
|
||||
|
||||
It("should announce the panicked spec", func() {
|
||||
Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSpecPanicked", spec, false, true)))
|
||||
})
|
||||
})
|
||||
|
||||
Context("When the spec failed", func() {
|
||||
BeforeEach(func() {
|
||||
spec.State = types.SpecStateFailed
|
||||
})
|
||||
|
||||
It("should announce the failed spec", func() {
|
||||
Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSpecFailed", spec, false, true)))
|
||||
})
|
||||
})
|
||||
|
||||
Context("in succinct mode", func() {
|
||||
BeforeEach(func() {
|
||||
reporterConfig.Succinct = true
|
||||
reporter = reporters.NewDefaultReporter(reporterConfig, stenographer)
|
||||
})
|
||||
|
||||
Context("When the spec passed", func() {
|
||||
BeforeEach(func() {
|
||||
spec.State = types.SpecStatePassed
|
||||
})
|
||||
|
||||
Context("When the spec was a measurement", func() {
|
||||
BeforeEach(func() {
|
||||
spec.IsMeasurement = true
|
||||
})
|
||||
|
||||
It("should announce the measurement", func() {
|
||||
Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSuccesfulMeasurement", spec, true)))
|
||||
})
|
||||
})
|
||||
|
||||
Context("When the spec is slow", func() {
|
||||
BeforeEach(func() {
|
||||
spec.RunTime = time.Second
|
||||
})
|
||||
|
||||
It("should announce that it was slow", func() {
|
||||
Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSuccesfulSlowSpec", spec, true)))
|
||||
})
|
||||
})
|
||||
|
||||
Context("Otherwise", func() {
|
||||
It("should announce the succesful spec", func() {
|
||||
Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSuccesfulSpec", spec)))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Context("When the spec is pending", func() {
|
||||
BeforeEach(func() {
|
||||
spec.State = types.SpecStatePending
|
||||
})
|
||||
|
||||
It("should announce the pending spec, but never noisily", func() {
|
||||
Ω(stenographer.Calls()[0]).Should(Equal(call("AnnouncePendingSpec", spec, false)))
|
||||
})
|
||||
})
|
||||
|
||||
Context("When the spec is skipped", func() {
|
||||
BeforeEach(func() {
|
||||
spec.State = types.SpecStateSkipped
|
||||
})
|
||||
|
||||
It("should announce the skipped spec", func() {
|
||||
Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSkippedSpec", spec)))
|
||||
})
|
||||
})
|
||||
|
||||
Context("When the spec timed out", func() {
|
||||
BeforeEach(func() {
|
||||
spec.State = types.SpecStateTimedOut
|
||||
})
|
||||
|
||||
It("should announce the timedout spec", func() {
|
||||
Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSpecTimedOut", spec, true, true)))
|
||||
})
|
||||
})
|
||||
|
||||
Context("When the spec panicked", func() {
|
||||
BeforeEach(func() {
|
||||
spec.State = types.SpecStatePanicked
|
||||
})
|
||||
|
||||
It("should announce the panicked spec", func() {
|
||||
Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSpecPanicked", spec, true, true)))
|
||||
})
|
||||
})
|
||||
|
||||
Context("When the spec failed", func() {
|
||||
BeforeEach(func() {
|
||||
spec.State = types.SpecStateFailed
|
||||
})
|
||||
|
||||
It("should announce the failed spec", func() {
|
||||
Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSpecFailed", spec, true, true)))
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("SpecSuiteDidEnd", func() {
|
||||
BeforeEach(func() {
|
||||
suite = &types.SuiteSummary{}
|
||||
reporter.SpecSuiteDidEnd(suite)
|
||||
})
|
||||
|
||||
It("should announce the spec run's completion", func() {
|
||||
Ω(stenographer.Calls()[1]).Should(Equal(call("AnnounceSpecRunCompletion", suite, false)))
|
||||
})
|
||||
})
|
||||
})
|
||||
241 Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/junit_reporter_test.go (generated, vendored)
@@ -1,241 +0,0 @@
|
||||
package reporters_test
|
||||
|
||||
import (
|
||||
"encoding/xml"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
. "github.com/onsi/ginkgo"
|
||||
"github.com/onsi/ginkgo/config"
|
||||
"github.com/onsi/ginkgo/internal/codelocation"
|
||||
"github.com/onsi/ginkgo/reporters"
|
||||
"github.com/onsi/ginkgo/types"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("JUnit Reporter", func() {
|
||||
var (
|
||||
outputFile string
|
||||
reporter Reporter
|
||||
)
|
||||
|
||||
readOutputFile := func() reporters.JUnitTestSuite {
|
||||
bytes, err := ioutil.ReadFile(outputFile)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
var suite reporters.JUnitTestSuite
|
||||
err = xml.Unmarshal(bytes, &suite)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
return suite
|
||||
}
|
||||
|
||||
BeforeEach(func() {
|
||||
f, err := ioutil.TempFile("", "output")
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
f.Close()
|
||||
outputFile = f.Name()
|
||||
|
||||
reporter = reporters.NewJUnitReporter(outputFile)
|
||||
|
||||
reporter.SpecSuiteWillBegin(config.GinkgoConfigType{}, &types.SuiteSummary{
|
||||
SuiteDescription: "My test suite",
|
||||
NumberOfSpecsThatWillBeRun: 1,
|
||||
})
|
||||
})
|
||||
|
||||
AfterEach(func() {
|
||||
os.RemoveAll(outputFile)
|
||||
})
|
||||
|
||||
Describe("a passing test", func() {
|
||||
BeforeEach(func() {
|
||||
beforeSuite := &types.SetupSummary{
|
||||
State: types.SpecStatePassed,
|
||||
}
|
||||
reporter.BeforeSuiteDidRun(beforeSuite)
|
||||
|
||||
afterSuite := &types.SetupSummary{
|
||||
State: types.SpecStatePassed,
|
||||
}
|
||||
reporter.AfterSuiteDidRun(afterSuite)
|
||||
|
||||
spec := &types.SpecSummary{
|
||||
ComponentTexts: []string{"[Top Level]", "A", "B", "C"},
|
||||
State: types.SpecStatePassed,
|
||||
RunTime: 5 * time.Second,
|
||||
}
|
||||
reporter.SpecWillRun(spec)
|
||||
reporter.SpecDidComplete(spec)
|
||||
|
||||
reporter.SpecSuiteDidEnd(&types.SuiteSummary{
|
||||
NumberOfSpecsThatWillBeRun: 1,
|
||||
NumberOfFailedSpecs: 0,
|
||||
RunTime: 10 * time.Second,
|
||||
})
|
||||
})
|
||||
|
||||
It("should record the test as passing", func() {
|
||||
output := readOutputFile()
|
||||
Ω(output.Tests).Should(Equal(1))
|
||||
Ω(output.Failures).Should(Equal(0))
|
||||
Ω(output.Time).Should(Equal(10.0))
|
||||
Ω(output.TestCases).Should(HaveLen(1))
|
||||
Ω(output.TestCases[0].Name).Should(Equal("A B C"))
|
||||
Ω(output.TestCases[0].ClassName).Should(Equal("My test suite"))
|
||||
Ω(output.TestCases[0].FailureMessage).Should(BeNil())
|
||||
Ω(output.TestCases[0].Skipped).Should(BeNil())
|
||||
Ω(output.TestCases[0].Time).Should(Equal(5.0))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("when the BeforeSuite fails", func() {
|
||||
var beforeSuite *types.SetupSummary
|
||||
|
||||
BeforeEach(func() {
|
||||
beforeSuite = &types.SetupSummary{
|
||||
State: types.SpecStateFailed,
|
||||
RunTime: 3 * time.Second,
|
||||
Failure: types.SpecFailure{
|
||||
Message: "failed to setup",
|
||||
ComponentCodeLocation: codelocation.New(0),
|
||||
},
|
||||
}
|
||||
reporter.BeforeSuiteDidRun(beforeSuite)
|
||||
|
||||
reporter.SpecSuiteDidEnd(&types.SuiteSummary{
|
||||
NumberOfSpecsThatWillBeRun: 1,
|
||||
NumberOfFailedSpecs: 1,
|
||||
RunTime: 10 * time.Second,
|
||||
})
|
||||
})
|
||||
|
||||
It("should record the test as having failed", func() {
|
||||
output := readOutputFile()
|
||||
Ω(output.Tests).Should(Equal(1))
|
||||
Ω(output.Failures).Should(Equal(1))
|
||||
Ω(output.Time).Should(Equal(10.0))
|
||||
Ω(output.TestCases[0].Name).Should(Equal("BeforeSuite"))
|
||||
Ω(output.TestCases[0].Time).Should(Equal(3.0))
|
||||
Ω(output.TestCases[0].ClassName).Should(Equal("My test suite"))
|
||||
Ω(output.TestCases[0].FailureMessage.Type).Should(Equal("Failure"))
|
||||
Ω(output.TestCases[0].FailureMessage.Message).Should(ContainSubstring("failed to setup"))
|
||||
Ω(output.TestCases[0].FailureMessage.Message).Should(ContainSubstring(beforeSuite.Failure.ComponentCodeLocation.String()))
|
||||
Ω(output.TestCases[0].Skipped).Should(BeNil())
|
||||
})
|
||||
})
|
||||
|
||||
Describe("when the AfterSuite fails", func() {
|
||||
var afterSuite *types.SetupSummary
|
||||
|
||||
BeforeEach(func() {
|
||||
afterSuite = &types.SetupSummary{
|
||||
State: types.SpecStateFailed,
|
||||
RunTime: 3 * time.Second,
|
||||
Failure: types.SpecFailure{
|
||||
Message: "failed to setup",
|
||||
ComponentCodeLocation: codelocation.New(0),
|
||||
},
|
||||
}
|
||||
reporter.AfterSuiteDidRun(afterSuite)
|
||||
|
||||
reporter.SpecSuiteDidEnd(&types.SuiteSummary{
|
||||
NumberOfSpecsThatWillBeRun: 1,
|
||||
NumberOfFailedSpecs: 1,
|
||||
RunTime: 10 * time.Second,
|
||||
})
|
||||
})
|
||||
|
||||
It("should record the test as having failed", func() {
|
||||
output := readOutputFile()
|
||||
Ω(output.Tests).Should(Equal(1))
|
||||
Ω(output.Failures).Should(Equal(1))
|
||||
Ω(output.Time).Should(Equal(10.0))
|
||||
Ω(output.TestCases[0].Name).Should(Equal("AfterSuite"))
|
||||
Ω(output.TestCases[0].Time).Should(Equal(3.0))
|
||||
Ω(output.TestCases[0].ClassName).Should(Equal("My test suite"))
|
||||
Ω(output.TestCases[0].FailureMessage.Type).Should(Equal("Failure"))
|
||||
Ω(output.TestCases[0].FailureMessage.Message).Should(ContainSubstring("failed to setup"))
|
||||
Ω(output.TestCases[0].FailureMessage.Message).Should(ContainSubstring(afterSuite.Failure.ComponentCodeLocation.String()))
|
||||
Ω(output.TestCases[0].Skipped).Should(BeNil())
|
||||
})
|
||||
})
|
||||
|
||||
specStateCases := []struct {
|
||||
state types.SpecState
|
||||
message string
|
||||
}{
|
||||
{types.SpecStateFailed, "Failure"},
|
||||
{types.SpecStateTimedOut, "Timeout"},
|
||||
{types.SpecStatePanicked, "Panic"},
|
||||
}
|
||||
|
||||
for _, specStateCase := range specStateCases {
|
||||
specStateCase := specStateCase
|
||||
Describe("a failing test", func() {
|
||||
var spec *types.SpecSummary
|
||||
BeforeEach(func() {
|
||||
spec = &types.SpecSummary{
|
||||
ComponentTexts: []string{"[Top Level]", "A", "B", "C"},
|
||||
State: specStateCase.state,
|
||||
RunTime: 5 * time.Second,
|
||||
Failure: types.SpecFailure{
|
||||
ComponentCodeLocation: codelocation.New(0),
|
||||
Message: "I failed",
|
||||
},
|
||||
}
|
||||
reporter.SpecWillRun(spec)
|
||||
reporter.SpecDidComplete(spec)
|
||||
|
||||
reporter.SpecSuiteDidEnd(&types.SuiteSummary{
|
||||
NumberOfSpecsThatWillBeRun: 1,
|
||||
NumberOfFailedSpecs: 1,
|
||||
RunTime: 10 * time.Second,
|
||||
})
|
||||
})
|
||||
|
||||
It("should record test as failing", func() {
|
||||
output := readOutputFile()
|
||||
Ω(output.Tests).Should(Equal(1))
|
||||
Ω(output.Failures).Should(Equal(1))
|
||||
Ω(output.Time).Should(Equal(10.0))
|
||||
Ω(output.TestCases[0].Name).Should(Equal("A B C"))
|
||||
Ω(output.TestCases[0].ClassName).Should(Equal("My test suite"))
|
||||
Ω(output.TestCases[0].FailureMessage.Type).Should(Equal(specStateCase.message))
|
||||
Ω(output.TestCases[0].FailureMessage.Message).Should(ContainSubstring("I failed"))
|
||||
Ω(output.TestCases[0].FailureMessage.Message).Should(ContainSubstring(spec.Failure.ComponentCodeLocation.String()))
|
||||
Ω(output.TestCases[0].Skipped).Should(BeNil())
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
for _, specStateCase := range []types.SpecState{types.SpecStatePending, types.SpecStateSkipped} {
|
||||
specStateCase := specStateCase
|
||||
Describe("a skipped test", func() {
|
||||
var spec *types.SpecSummary
|
||||
BeforeEach(func() {
|
||||
spec = &types.SpecSummary{
|
||||
ComponentTexts: []string{"[Top Level]", "A", "B", "C"},
|
||||
State: specStateCase,
|
||||
RunTime: 5 * time.Second,
|
||||
}
|
||||
reporter.SpecWillRun(spec)
|
||||
reporter.SpecDidComplete(spec)
|
||||
|
||||
reporter.SpecSuiteDidEnd(&types.SuiteSummary{
|
||||
NumberOfSpecsThatWillBeRun: 1,
|
||||
NumberOfFailedSpecs: 0,
|
||||
RunTime: 10 * time.Second,
|
||||
})
|
||||
})
|
||||
|
||||
It("should record test as failing", func() {
|
||||
output := readOutputFile()
|
||||
Ω(output.Tests).Should(Equal(1))
|
||||
Ω(output.Failures).Should(Equal(0))
|
||||
Ω(output.Time).Should(Equal(10.0))
|
||||
Ω(output.TestCases[0].Name).Should(Equal("A B C"))
|
||||
Ω(output.TestCases[0].Skipped).ShouldNot(BeNil())
|
||||
})
|
||||
})
|
||||
}
|
||||
})
|
||||
13 Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/reporters_suite_test.go (generated, vendored)
@@ -1,13 +0,0 @@
package reporters_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"

	"testing"
)

func TestReporters(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Reporters Suite")
}
213 Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/teamcity_reporter_test.go (generated, vendored)
@@ -1,213 +0,0 @@
|
||||
package reporters_test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
. "github.com/onsi/ginkgo"
|
||||
"github.com/onsi/ginkgo/config"
|
||||
"github.com/onsi/ginkgo/internal/codelocation"
|
||||
"github.com/onsi/ginkgo/reporters"
|
||||
"github.com/onsi/ginkgo/types"
|
||||
. "github.com/onsi/gomega"
|
||||
"time"
|
||||
)
|
||||
|
||||
var _ = Describe("TeamCity Reporter", func() {
|
||||
var (
|
||||
buffer bytes.Buffer
|
||||
reporter Reporter
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
buffer.Truncate(0)
|
||||
reporter = reporters.NewTeamCityReporter(&buffer)
|
||||
reporter.SpecSuiteWillBegin(config.GinkgoConfigType{}, &types.SuiteSummary{
|
||||
SuiteDescription: "Foo's test suite",
|
||||
NumberOfSpecsThatWillBeRun: 1,
|
||||
})
|
||||
})
|
||||
|
||||
Describe("a passing test", func() {
|
||||
BeforeEach(func() {
|
||||
beforeSuite := &types.SetupSummary{
|
||||
State: types.SpecStatePassed,
|
||||
}
|
||||
reporter.BeforeSuiteDidRun(beforeSuite)
|
||||
|
||||
afterSuite := &types.SetupSummary{
|
||||
State: types.SpecStatePassed,
|
||||
}
|
||||
reporter.AfterSuiteDidRun(afterSuite)
|
||||
|
||||
spec := &types.SpecSummary{
|
||||
ComponentTexts: []string{"[Top Level]", "A", "B", "C"},
|
||||
State: types.SpecStatePassed,
|
||||
RunTime: 5 * time.Second,
|
||||
}
|
||||
reporter.SpecWillRun(spec)
|
||||
reporter.SpecDidComplete(spec)
|
||||
|
||||
reporter.SpecSuiteDidEnd(&types.SuiteSummary{
|
||||
NumberOfSpecsThatWillBeRun: 1,
|
||||
NumberOfFailedSpecs: 0,
|
||||
RunTime: 10 * time.Second,
|
||||
})
|
||||
})
|
||||
|
||||
It("should record the test as passing", func() {
|
||||
actual := buffer.String()
|
||||
expected :=
|
||||
"##teamcity[testSuiteStarted name='Foo|'s test suite']" +
|
||||
"##teamcity[testStarted name='A B C']" +
|
||||
"##teamcity[testFinished name='A B C' duration='5000']" +
|
||||
"##teamcity[testSuiteFinished name='Foo|'s test suite']"
|
||||
Ω(actual).Should(Equal(expected))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("when the BeforeSuite fails", func() {
|
||||
var beforeSuite *types.SetupSummary
|
||||
|
||||
BeforeEach(func() {
|
||||
beforeSuite = &types.SetupSummary{
|
||||
State: types.SpecStateFailed,
|
||||
RunTime: 3 * time.Second,
|
||||
Failure: types.SpecFailure{
|
||||
Message: "failed to setup\n",
|
||||
ComponentCodeLocation: codelocation.New(0),
|
||||
},
|
||||
}
|
||||
reporter.BeforeSuiteDidRun(beforeSuite)
|
||||
|
||||
reporter.SpecSuiteDidEnd(&types.SuiteSummary{
|
||||
NumberOfSpecsThatWillBeRun: 1,
|
||||
NumberOfFailedSpecs: 1,
|
||||
RunTime: 10 * time.Second,
|
||||
})
|
||||
})
|
||||
|
||||
It("should record the test as having failed", func() {
|
||||
actual := buffer.String()
|
||||
expected := fmt.Sprintf(
|
||||
"##teamcity[testSuiteStarted name='Foo|'s test suite']"+
|
||||
"##teamcity[testStarted name='BeforeSuite']"+
|
||||
"##teamcity[testFailed name='BeforeSuite' message='%s' details='failed to setup|n']"+
|
||||
"##teamcity[testFinished name='BeforeSuite' duration='3000']"+
|
||||
"##teamcity[testSuiteFinished name='Foo|'s test suite']", beforeSuite.Failure.ComponentCodeLocation.String(),
|
||||
)
|
||||
Ω(actual).Should(Equal(expected))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("when the AfterSuite fails", func() {
|
||||
var afterSuite *types.SetupSummary
|
||||
|
||||
BeforeEach(func() {
|
||||
afterSuite = &types.SetupSummary{
|
||||
State: types.SpecStateFailed,
|
||||
RunTime: 3 * time.Second,
|
||||
Failure: types.SpecFailure{
|
||||
Message: "failed to setup\n",
|
||||
ComponentCodeLocation: codelocation.New(0),
|
||||
},
|
||||
}
|
||||
reporter.AfterSuiteDidRun(afterSuite)
|
||||
|
||||
reporter.SpecSuiteDidEnd(&types.SuiteSummary{
|
||||
NumberOfSpecsThatWillBeRun: 1,
|
||||
NumberOfFailedSpecs: 1,
|
||||
RunTime: 10 * time.Second,
|
||||
})
|
||||
})
|
||||
|
||||
It("should record the test as having failed", func() {
|
||||
actual := buffer.String()
|
||||
expected := fmt.Sprintf(
|
||||
"##teamcity[testSuiteStarted name='Foo|'s test suite']"+
|
||||
"##teamcity[testStarted name='AfterSuite']"+
|
||||
"##teamcity[testFailed name='AfterSuite' message='%s' details='failed to setup|n']"+
|
||||
"##teamcity[testFinished name='AfterSuite' duration='3000']"+
|
||||
"##teamcity[testSuiteFinished name='Foo|'s test suite']", afterSuite.Failure.ComponentCodeLocation.String(),
|
||||
)
|
||||
Ω(actual).Should(Equal(expected))
|
||||
})
|
||||
})
|
||||
specStateCases := []struct {
|
||||
state types.SpecState
|
||||
message string
|
||||
}{
|
||||
{types.SpecStateFailed, "Failure"},
|
||||
{types.SpecStateTimedOut, "Timeout"},
|
||||
{types.SpecStatePanicked, "Panic"},
|
||||
}
|
||||
|
||||
for _, specStateCase := range specStateCases {
|
||||
specStateCase := specStateCase
|
||||
Describe("a failing test", func() {
|
||||
var spec *types.SpecSummary
|
||||
BeforeEach(func() {
|
||||
spec = &types.SpecSummary{
|
||||
ComponentTexts: []string{"[Top Level]", "A", "B", "C"},
|
||||
State: specStateCase.state,
|
||||
RunTime: 5 * time.Second,
|
||||
Failure: types.SpecFailure{
|
||||
ComponentCodeLocation: codelocation.New(0),
|
||||
Message: "I failed",
|
||||
},
|
||||
}
|
||||
reporter.SpecWillRun(spec)
|
||||
reporter.SpecDidComplete(spec)
|
||||
|
||||
reporter.SpecSuiteDidEnd(&types.SuiteSummary{
|
||||
NumberOfSpecsThatWillBeRun: 1,
|
||||
NumberOfFailedSpecs: 1,
|
||||
RunTime: 10 * time.Second,
|
||||
})
|
||||
})
|
||||
|
||||
It("should record test as failing", func() {
|
||||
actual := buffer.String()
|
||||
expected :=
|
||||
fmt.Sprintf("##teamcity[testSuiteStarted name='Foo|'s test suite']"+
|
||||
"##teamcity[testStarted name='A B C']"+
|
||||
"##teamcity[testFailed name='A B C' message='%s' details='I failed']"+
|
||||
"##teamcity[testFinished name='A B C' duration='5000']"+
|
||||
"##teamcity[testSuiteFinished name='Foo|'s test suite']", spec.Failure.ComponentCodeLocation.String())
|
||||
Ω(actual).Should(Equal(expected))
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
for _, specStateCase := range []types.SpecState{types.SpecStatePending, types.SpecStateSkipped} {
|
||||
specStateCase := specStateCase
|
||||
Describe("a skipped test", func() {
|
||||
var spec *types.SpecSummary
|
||||
BeforeEach(func() {
|
||||
spec = &types.SpecSummary{
|
||||
ComponentTexts: []string{"[Top Level]", "A", "B", "C"},
|
||||
State: specStateCase,
|
||||
RunTime: 5 * time.Second,
|
||||
}
|
||||
reporter.SpecWillRun(spec)
|
||||
reporter.SpecDidComplete(spec)
|
||||
|
||||
reporter.SpecSuiteDidEnd(&types.SuiteSummary{
|
||||
NumberOfSpecsThatWillBeRun: 1,
|
||||
NumberOfFailedSpecs: 0,
|
||||
RunTime: 10 * time.Second,
|
||||
})
|
||||
})
|
||||
|
||||
It("should record test as ignored", func() {
|
||||
actual := buffer.String()
|
||||
expected :=
|
||||
"##teamcity[testSuiteStarted name='Foo|'s test suite']" +
|
||||
"##teamcity[testStarted name='A B C']" +
|
||||
"##teamcity[testIgnored name='A B C']" +
|
||||
"##teamcity[testFinished name='A B C' duration='5000']" +
|
||||
"##teamcity[testSuiteFinished name='Foo|'s test suite']"
|
||||
Ω(actual).Should(Equal(expected))
|
||||
})
|
||||
})
|
||||
}
|
||||
})
|
||||
13 Godeps/_workspace/src/github.com/onsi/ginkgo/types/types_suite_test.go (generated, vendored)
@@ -1,13 +0,0 @@
package types_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"

	"testing"
)

func TestTypes(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Types Suite")
}
81 Godeps/_workspace/src/github.com/onsi/ginkgo/types/types_test.go (generated, vendored)
@@ -1,81 +0,0 @@
|
||||
package types_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo/types"
|
||||
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var specStates = []SpecState{
|
||||
SpecStatePassed,
|
||||
SpecStateTimedOut,
|
||||
SpecStatePanicked,
|
||||
SpecStateFailed,
|
||||
SpecStatePending,
|
||||
SpecStateSkipped,
|
||||
}
|
||||
|
||||
func verifySpecSummary(caller func(SpecSummary) bool, trueStates ...SpecState) {
|
||||
summary := SpecSummary{}
|
||||
trueStateLookup := map[SpecState]bool{}
|
||||
for _, state := range trueStates {
|
||||
trueStateLookup[state] = true
|
||||
summary.State = state
|
||||
Ω(caller(summary)).Should(BeTrue())
|
||||
}
|
||||
|
||||
for _, state := range specStates {
|
||||
if trueStateLookup[state] {
|
||||
continue
|
||||
}
|
||||
summary.State = state
|
||||
Ω(caller(summary)).Should(BeFalse())
|
||||
}
|
||||
}
|
||||
|
||||
var _ = Describe("Types", func() {
|
||||
Describe("SpecSummary", func() {
|
||||
It("knows when it is in a failure-like state", func() {
|
||||
verifySpecSummary(func(summary SpecSummary) bool {
|
||||
return summary.HasFailureState()
|
||||
}, SpecStateTimedOut, SpecStatePanicked, SpecStateFailed)
|
||||
})
|
||||
|
||||
It("knows when it passed", func() {
|
||||
verifySpecSummary(func(summary SpecSummary) bool {
|
||||
return summary.Passed()
|
||||
}, SpecStatePassed)
|
||||
})
|
||||
|
||||
It("knows when it has failed", func() {
|
||||
verifySpecSummary(func(summary SpecSummary) bool {
|
||||
return summary.Failed()
|
||||
}, SpecStateFailed)
|
||||
})
|
||||
|
||||
It("knows when it has panicked", func() {
|
||||
verifySpecSummary(func(summary SpecSummary) bool {
|
||||
return summary.Panicked()
|
||||
}, SpecStatePanicked)
|
||||
})
|
||||
|
||||
It("knows when it has timed out", func() {
|
||||
verifySpecSummary(func(summary SpecSummary) bool {
|
||||
return summary.TimedOut()
|
||||
}, SpecStateTimedOut)
|
||||
})
|
||||
|
||||
It("knows when it is pending", func() {
|
||||
verifySpecSummary(func(summary SpecSummary) bool {
|
||||
return summary.Pending()
|
||||
}, SpecStatePending)
|
||||
})
|
||||
|
||||
It("knows when it is skipped", func() {
|
||||
verifySpecSummary(func(summary SpecSummary) bool {
|
||||
return summary.Skipped()
|
||||
}, SpecStateSkipped)
|
||||
})
|
||||
})
|
||||
})
|
||||
5 Godeps/_workspace/src/github.com/onsi/gomega/.travis.yml (generated, vendored)
@@ -1,7 +1,8 @@
|
||||
language: go
|
||||
go:
|
||||
- 1.3
|
||||
|
||||
- 1.4
|
||||
- 1.5
|
||||
|
||||
install:
|
||||
- go get -v ./...
|
||||
- go get github.com/onsi/ginkgo
|
||||
|
||||
10 Godeps/_workspace/src/github.com/onsi/gomega/CHANGELOG.md (generated, vendored)
@@ -8,6 +8,16 @@ Improvements:
- Added `HavePrefix` and `HaveSuffix` matchers.
- `ghttp` can now handle concurrent requests.
- Added `Succeed` which allows one to write `Ω(MyFunction()).Should(Succeed())`.
- Improved `ghttp`'s behavior around failing assertions and panics:
    - If a registered handler makes a failing assertion `ghttp` will return `500`.
    - If a registered handler panics, `ghttp` will return `500` *and* fail the test. This is new behavior that may cause existing code to break. This code is almost certainly incorrect and creating a false positive.
- `ghttp` servers can take an `io.Writer`. `ghttp` will write a line to the writer when each request arrives.
- Added `WithTransform` matcher to allow munging input data before feeding into the relevant matcher
- Added boolean `And`, `Or`, and `Not` matchers to allow creating composite matchers

Bug Fixes:
- gexec: `session.Wait` now uses `EventuallyWithOffset` to get the right line number in the failure.
- `ContainElement` no longer bails if a passed-in matcher errors.

## 1.0 (8/2/2014)
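For orientation, a minimal sketch of how the matchers called out in the changelog above compose in a spec. The matcher names come from the changelog; doSomething and the values under test are hypothetical and exist only for illustration.

package example_test

import (
	"strings"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
)

// doSomething is a hypothetical func() error standing in for any call whose error you assert on.
func doSomething() error { return nil }

var _ = Describe("matchers highlighted in the changelog (illustrative)", func() {
	It("composes the new matchers", func() {
		// Succeed passes when a func returning error returns nil.
		Ω(doSomething()).Should(Succeed())

		// HavePrefix/HaveSuffix on strings, combined with the boolean And matcher.
		Ω("gomega-1.1").Should(And(HavePrefix("gomega"), HaveSuffix("1.1")))

		// WithTransform munges the input before handing it to the inner matcher.
		Ω("  padded  ").Should(WithTransform(strings.TrimSpace, Equal("padded")))

		// Or and Not build composite negative assertions.
		Ω("gomega").ShouldNot(Or(BeEmpty(), Not(ContainSubstring("meg"))))
	})
})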
13
Godeps/_workspace/src/github.com/onsi/gomega/format/format_suite_test.go
generated
vendored
@@ -1,13 +0,0 @@
package format_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"

	"testing"
)

func TestFormat(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Format Suite")
}
449
Godeps/_workspace/src/github.com/onsi/gomega/format/format_test.go
generated
vendored
@@ -1,449 +0,0 @@
|
||||
package format_test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
. "github.com/onsi/gomega/format"
|
||||
"github.com/onsi/gomega/types"
|
||||
)
|
||||
|
||||
//recursive struct
|
||||
|
||||
type StringAlias string
|
||||
type ByteAlias []byte
|
||||
type IntAlias int
|
||||
|
||||
type AStruct struct {
|
||||
Exported string
|
||||
}
|
||||
|
||||
type SimpleStruct struct {
|
||||
Name string
|
||||
Enumeration int
|
||||
Veritas bool
|
||||
Data []byte
|
||||
secret uint32
|
||||
}
|
||||
|
||||
type ComplexStruct struct {
|
||||
Strings []string
|
||||
SimpleThings []*SimpleStruct
|
||||
DataMaps map[int]ByteAlias
|
||||
}
|
||||
|
||||
type SecretiveStruct struct {
|
||||
boolValue bool
|
||||
intValue int
|
||||
uintValue uint
|
||||
uintptrValue uintptr
|
||||
floatValue float32
|
||||
complexValue complex64
|
||||
chanValue chan bool
|
||||
funcValue func()
|
||||
pointerValue *int
|
||||
sliceValue []string
|
||||
byteSliceValue []byte
|
||||
stringValue string
|
||||
arrValue [3]int
|
||||
byteArrValue [3]byte
|
||||
mapValue map[string]int
|
||||
structValue AStruct
|
||||
interfaceValue interface{}
|
||||
}
|
||||
|
||||
type GoStringer struct {
|
||||
}
|
||||
|
||||
func (g GoStringer) GoString() string {
|
||||
return "go-string"
|
||||
}
|
||||
|
||||
func (g GoStringer) String() string {
|
||||
return "string"
|
||||
}
|
||||
|
||||
type Stringer struct {
|
||||
}
|
||||
|
||||
func (g Stringer) String() string {
|
||||
return "string"
|
||||
}
|
||||
|
||||
var _ = Describe("Format", func() {
|
||||
match := func(typeRepresentation string, valueRepresentation string, args ...interface{}) types.GomegaMatcher {
|
||||
if len(args) > 0 {
|
||||
valueRepresentation = fmt.Sprintf(valueRepresentation, args...)
|
||||
}
|
||||
return Equal(fmt.Sprintf("%s<%s>: %s", Indent, typeRepresentation, valueRepresentation))
|
||||
}
|
||||
|
||||
matchRegexp := func(typeRepresentation string, valueRepresentation string, args ...interface{}) types.GomegaMatcher {
|
||||
if len(args) > 0 {
|
||||
valueRepresentation = fmt.Sprintf(valueRepresentation, args...)
|
||||
}
|
||||
return MatchRegexp(fmt.Sprintf("%s<%s>: %s", Indent, typeRepresentation, valueRepresentation))
|
||||
}
|
||||
|
||||
hashMatchingRegexp := func(entries ...string) string {
|
||||
entriesSwitch := "(" + strings.Join(entries, "|") + ")"
|
||||
arr := make([]string, len(entries))
|
||||
for i := range arr {
|
||||
arr[i] = entriesSwitch
|
||||
}
|
||||
return "{" + strings.Join(arr, ", ") + "}"
|
||||
}
|
||||
|
||||
Describe("Message", func() {
|
||||
Context("with only an actual value", func() {
|
||||
It("should print out an indented formatted representation of the value and the message", func() {
|
||||
Ω(Message(3, "to be three.")).Should(Equal("Expected\n <int>: 3\nto be three."))
|
||||
})
|
||||
})
|
||||
|
||||
Context("with an actual and an expected value", func() {
|
||||
It("should print out an indented formatted representatino of both values, and the message", func() {
|
||||
Ω(Message(3, "to equal", 4)).Should(Equal("Expected\n <int>: 3\nto equal\n <int>: 4"))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("IndentString", func() {
|
||||
It("should indent the string", func() {
|
||||
Ω(IndentString("foo\n bar\nbaz", 2)).Should(Equal(" foo\n bar\n baz"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Object", func() {
|
||||
Describe("formatting boolean values", func() {
|
||||
It("should give the type and format values correctly", func() {
|
||||
Ω(Object(true, 1)).Should(match("bool", "true"))
|
||||
Ω(Object(false, 1)).Should(match("bool", "false"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("formatting numbers", func() {
|
||||
It("should give the type and format values correctly", func() {
|
||||
Ω(Object(int(3), 1)).Should(match("int", "3"))
|
||||
Ω(Object(int8(3), 1)).Should(match("int8", "3"))
|
||||
Ω(Object(int16(3), 1)).Should(match("int16", "3"))
|
||||
Ω(Object(int32(3), 1)).Should(match("int32", "3"))
|
||||
Ω(Object(int64(3), 1)).Should(match("int64", "3"))
|
||||
|
||||
Ω(Object(uint(3), 1)).Should(match("uint", "3"))
|
||||
Ω(Object(uint8(3), 1)).Should(match("uint8", "3"))
|
||||
Ω(Object(uint16(3), 1)).Should(match("uint16", "3"))
|
||||
Ω(Object(uint32(3), 1)).Should(match("uint32", "3"))
|
||||
Ω(Object(uint64(3), 1)).Should(match("uint64", "3"))
|
||||
})
|
||||
|
||||
It("should handle uintptr differently", func() {
|
||||
Ω(Object(uintptr(3), 1)).Should(match("uintptr", "0x3"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("formatting channels", func() {
|
||||
It("should give the type and format values correctly", func() {
|
||||
c := make(chan<- bool, 3)
|
||||
c <- true
|
||||
c <- false
|
||||
Ω(Object(c, 1)).Should(match("chan<- bool | len:2, cap:3", "%v", c))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("formatting strings", func() {
|
||||
It("should give the type and format values correctly", func() {
|
||||
s := "a\nb\nc"
|
||||
Ω(Object(s, 1)).Should(match("string", `a
|
||||
b
|
||||
c`))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("formatting []byte slices", func() {
|
||||
It("should present them as strings", func() {
|
||||
b := []byte("a\nb\nc")
|
||||
Ω(Object(b, 1)).Should(matchRegexp(`\[\]uint8 \| len:5, cap:\d+`, `a
|
||||
b
|
||||
c`))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("formatting functions", func() {
|
||||
It("should give the type and format values correctly", func() {
|
||||
f := func(a string, b []int) ([]byte, error) {
|
||||
return []byte("abc"), nil
|
||||
}
|
||||
Ω(Object(f, 1)).Should(match("func(string, []int) ([]uint8, error)", "%v", f))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("formatting pointers", func() {
|
||||
It("should give the type and dereference the value to format it correctly", func() {
|
||||
a := 3
|
||||
Ω(Object(&a, 1)).Should(match(fmt.Sprintf("*int | %p", &a), "3"))
|
||||
})
|
||||
|
||||
Context("when there are pointers to pointers...", func() {
|
||||
It("should recursively deference the pointer until it gets to a value", func() {
|
||||
a := 3
|
||||
var b *int
|
||||
var c **int
|
||||
var d ***int
|
||||
b = &a
|
||||
c = &b
|
||||
d = &c
|
||||
|
||||
Ω(Object(d, 1)).Should(match(fmt.Sprintf("***int | %p", d), "3"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the pointer points to nil", func() {
|
||||
It("should say nil and not explode", func() {
|
||||
var a *AStruct
|
||||
Ω(Object(a, 1)).Should(match("*format_test.AStruct | 0x0", "nil"))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("formatting arrays", func() {
|
||||
It("should give the type and format values correctly", func() {
|
||||
w := [3]string{"Jed Bartlet", "Toby Ziegler", "CJ Cregg"}
|
||||
Ω(Object(w, 1)).Should(match("[3]string", `["Jed Bartlet", "Toby Ziegler", "CJ Cregg"]`))
|
||||
})
|
||||
|
||||
Context("with byte arrays", func() {
|
||||
It("should give the type and format values correctly", func() {
|
||||
w := [3]byte{17, 28, 19}
|
||||
Ω(Object(w, 1)).Should(match("[3]uint8", `[17, 28, 19]`))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("formatting slices", func() {
|
||||
It("should include the length and capacity in the type information", func() {
|
||||
s := make([]bool, 3, 4)
|
||||
Ω(Object(s, 1)).Should(match("[]bool | len:3, cap:4", "[false, false, false]"))
|
||||
})
|
||||
|
||||
Context("when the slice contains long entries", func() {
|
||||
It("should format the entries with newlines", func() {
|
||||
w := []string{"Josiah Edward Bartlet", "Toby Ziegler", "CJ Cregg"}
|
||||
expected := `[
|
||||
"Josiah Edward Bartlet",
|
||||
"Toby Ziegler",
|
||||
"CJ Cregg",
|
||||
]`
|
||||
Ω(Object(w, 1)).Should(match("[]string | len:3, cap:3", expected))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("formatting maps", func() {
|
||||
It("should include the length in the type information", func() {
|
||||
m := make(map[int]bool, 5)
|
||||
m[3] = true
|
||||
m[4] = false
|
||||
Ω(Object(m, 1)).Should(matchRegexp(`map\[int\]bool \| len:2`, hashMatchingRegexp("3: true", "4: false")))
|
||||
})
|
||||
|
||||
Context("when the slice contains long entries", func() {
|
||||
It("should format the entries with newlines", func() {
|
||||
m := map[string][]byte{}
|
||||
m["Josiah Edward Bartlet"] = []byte("Martin Sheen")
|
||||
m["Toby Ziegler"] = []byte("Richard Schiff")
|
||||
m["CJ Cregg"] = []byte("Allison Janney")
|
||||
expected := `{
|
||||
("Josiah Edward Bartlet": "Martin Sheen"|"Toby Ziegler": "Richard Schiff"|"CJ Cregg": "Allison Janney"),
|
||||
("Josiah Edward Bartlet": "Martin Sheen"|"Toby Ziegler": "Richard Schiff"|"CJ Cregg": "Allison Janney"),
|
||||
("Josiah Edward Bartlet": "Martin Sheen"|"Toby Ziegler": "Richard Schiff"|"CJ Cregg": "Allison Janney"),
|
||||
}`
|
||||
Ω(Object(m, 1)).Should(matchRegexp(`map\[string\]\[\]uint8 \| len:3`, expected))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("formatting structs", func() {
|
||||
It("should include the struct name and the field names", func() {
|
||||
s := SimpleStruct{
|
||||
Name: "Oswald",
|
||||
Enumeration: 17,
|
||||
Veritas: true,
|
||||
Data: []byte("datum"),
|
||||
secret: 1983,
|
||||
}
|
||||
|
||||
Ω(Object(s, 1)).Should(match("format_test.SimpleStruct", `{Name: "Oswald", Enumeration: 17, Veritas: true, Data: "datum", secret: 1983}`))
|
||||
})
|
||||
|
||||
Context("when the struct contains long entries", func() {
|
||||
It("should format the entries with new lines", func() {
|
||||
s := &SimpleStruct{
|
||||
Name: "Mithrandir Gandalf Greyhame",
|
||||
Enumeration: 2021,
|
||||
Veritas: true,
|
||||
Data: []byte("wizard"),
|
||||
secret: 3,
|
||||
}
|
||||
|
||||
Ω(Object(s, 1)).Should(match(fmt.Sprintf("*format_test.SimpleStruct | %p", s), `{
|
||||
Name: "Mithrandir Gandalf Greyhame",
|
||||
Enumeration: 2021,
|
||||
Veritas: true,
|
||||
Data: "wizard",
|
||||
secret: 3,
|
||||
}`))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("formatting nil values", func() {
|
||||
It("should print out nil", func() {
|
||||
Ω(Object(nil, 1)).Should(match("nil", "nil"))
|
||||
var typedNil *AStruct
|
||||
Ω(Object(typedNil, 1)).Should(match("*format_test.AStruct | 0x0", "nil"))
|
||||
var c chan<- bool
|
||||
Ω(Object(c, 1)).Should(match("chan<- bool | len:0, cap:0", "nil"))
|
||||
var s []string
|
||||
Ω(Object(s, 1)).Should(match("[]string | len:0, cap:0", "nil"))
|
||||
var m map[string]bool
|
||||
Ω(Object(m, 1)).Should(match("map[string]bool | len:0", "nil"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("formatting aliased types", func() {
|
||||
It("should print out the correct alias type", func() {
|
||||
Ω(Object(StringAlias("alias"), 1)).Should(match("format_test.StringAlias", `alias`))
|
||||
Ω(Object(ByteAlias("alias"), 1)).Should(matchRegexp(`format_test\.ByteAlias \| len:5, cap:\d+`, `alias`))
|
||||
Ω(Object(IntAlias(3), 1)).Should(match("format_test.IntAlias", "3"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("handling nested things", func() {
|
||||
It("should produce a correctly nested representation", func() {
|
||||
s := ComplexStruct{
|
||||
Strings: []string{"lots", "of", "short", "strings"},
|
||||
SimpleThings: []*SimpleStruct{
|
||||
{"short", 7, true, []byte("succinct"), 17},
|
||||
{"something longer", 427, true, []byte("designed to wrap around nicely"), 30},
|
||||
},
|
||||
DataMaps: map[int]ByteAlias{
|
||||
17: ByteAlias("some substantially longer chunks of data"),
|
||||
1138: ByteAlias("that should make things wrap"),
|
||||
},
|
||||
}
|
||||
expected := `{
|
||||
Strings: \["lots", "of", "short", "strings"\],
|
||||
SimpleThings: \[
|
||||
{Name: "short", Enumeration: 7, Veritas: true, Data: "succinct", secret: 17},
|
||||
{
|
||||
Name: "something longer",
|
||||
Enumeration: 427,
|
||||
Veritas: true,
|
||||
Data: "designed to wrap around nicely",
|
||||
secret: 30,
|
||||
},
|
||||
\],
|
||||
DataMaps: {
|
||||
(17: "some substantially longer chunks of data"|1138: "that should make things wrap"),
|
||||
(17: "some substantially longer chunks of data"|1138: "that should make things wrap"),
|
||||
},
|
||||
}`
|
||||
Ω(Object(s, 1)).Should(matchRegexp(`format_test\.ComplexStruct`, expected))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Handling unexported fields in structs", func() {
|
||||
It("should handle all the various types correctly", func() {
|
||||
a := int(5)
|
||||
s := SecretiveStruct{
|
||||
boolValue: true,
|
||||
intValue: 3,
|
||||
uintValue: 4,
|
||||
uintptrValue: 5,
|
||||
floatValue: 6.0,
|
||||
complexValue: complex(5.0, 3.0),
|
||||
chanValue: make(chan bool, 2),
|
||||
funcValue: func() {},
|
||||
pointerValue: &a,
|
||||
sliceValue: []string{"string", "slice"},
|
||||
byteSliceValue: []byte("bytes"),
|
||||
stringValue: "a string",
|
||||
arrValue: [3]int{11, 12, 13},
|
||||
byteArrValue: [3]byte{17, 20, 32},
|
||||
mapValue: map[string]int{"a key": 20, "b key": 30},
|
||||
structValue: AStruct{"exported"},
|
||||
interfaceValue: map[string]int{"a key": 17},
|
||||
}
|
||||
|
||||
expected := fmt.Sprintf(`{
|
||||
boolValue: true,
|
||||
intValue: 3,
|
||||
uintValue: 4,
|
||||
uintptrValue: 0x5,
|
||||
floatValue: 6,
|
||||
complexValue: \(5\+3i\),
|
||||
chanValue: %p,
|
||||
funcValue: %p,
|
||||
pointerValue: 5,
|
||||
sliceValue: \["string", "slice"\],
|
||||
byteSliceValue: "bytes",
|
||||
stringValue: "a string",
|
||||
arrValue: \[11, 12, 13\],
|
||||
byteArrValue: \[17, 20, 32\],
|
||||
mapValue: %s,
|
||||
structValue: {Exported: "exported"},
|
||||
interfaceValue: {"a key": 17},
|
||||
}`, s.chanValue, s.funcValue, hashMatchingRegexp(`"a key": 20`, `"b key": 30`))
|
||||
|
||||
Ω(Object(s, 1)).Should(matchRegexp(`format_test\.SecretiveStruct`, expected))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Handling interfaces", func() {
|
||||
It("should unpack the interface", func() {
|
||||
outerHash := map[string]interface{}{}
|
||||
innerHash := map[string]int{}
|
||||
|
||||
innerHash["inner"] = 3
|
||||
outerHash["integer"] = 2
|
||||
outerHash["map"] = innerHash
|
||||
|
||||
expected := hashMatchingRegexp(`"integer": 2`, `"map": {"inner": 3}`)
|
||||
Ω(Object(outerHash, 1)).Should(matchRegexp(`map\[string\]interface {} \| len:2`, expected))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Handling recursive things", func() {
|
||||
It("should not go crazy...", func() {
|
||||
m := map[string]interface{}{}
|
||||
m["integer"] = 2
|
||||
m["map"] = m
|
||||
Ω(Object(m, 1)).Should(ContainSubstring("..."))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("When instructed to use the Stringer representation", func() {
|
||||
BeforeEach(func() {
|
||||
UseStringerRepresentation = true
|
||||
})
|
||||
|
||||
AfterEach(func() {
|
||||
UseStringerRepresentation = false
|
||||
})
|
||||
|
||||
Context("when passed a GoStringer", func() {
|
||||
It("should use what GoString() returns", func() {
|
||||
Ω(Object(GoStringer{}, 1)).Should(ContainSubstring("<format_test.GoStringer>: go-string"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when passed a stringer", func() {
|
||||
It("should use what String() returns", func() {
|
||||
Ω(Object(Stringer{}, 1)).Should(ContainSubstring("<format_test.Stringer>: string"))
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
158
Godeps/_workspace/src/github.com/onsi/gomega/gbytes/buffer_test.go
generated
vendored
@@ -1,158 +0,0 @@
|
||||
package gbytes_test
|
||||
|
||||
import (
|
||||
"io"
|
||||
"time"
|
||||
|
||||
. "github.com/onsi/gomega/gbytes"
|
||||
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("Buffer", func() {
|
||||
var buffer *Buffer
|
||||
|
||||
BeforeEach(func() {
|
||||
buffer = NewBuffer()
|
||||
})
|
||||
|
||||
Describe("dumping the entire contents of the buffer", func() {
|
||||
It("should return everything that's been written", func() {
|
||||
buffer.Write([]byte("abc"))
|
||||
buffer.Write([]byte("def"))
|
||||
Ω(buffer.Contents()).Should(Equal([]byte("abcdef")))
|
||||
|
||||
Ω(buffer).Should(Say("bcd"))
|
||||
Ω(buffer.Contents()).Should(Equal([]byte("abcdef")))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("creating a buffer with bytes", func() {
|
||||
It("should create the buffer with the cursor set to the beginning", func() {
|
||||
buffer := BufferWithBytes([]byte("abcdef"))
|
||||
Ω(buffer.Contents()).Should(Equal([]byte("abcdef")))
|
||||
Ω(buffer).Should(Say("abc"))
|
||||
Ω(buffer).ShouldNot(Say("abc"))
|
||||
Ω(buffer).Should(Say("def"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("reading from a buffer", func() {
|
||||
It("should read the current contents of the buffer", func() {
|
||||
buffer := BufferWithBytes([]byte("abcde"))
|
||||
|
||||
dest := make([]byte, 3)
|
||||
n, err := buffer.Read(dest)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(n).Should(Equal(3))
|
||||
Ω(string(dest)).Should(Equal("abc"))
|
||||
|
||||
dest = make([]byte, 3)
|
||||
n, err = buffer.Read(dest)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(n).Should(Equal(2))
|
||||
Ω(string(dest[:n])).Should(Equal("de"))
|
||||
|
||||
n, err = buffer.Read(dest)
|
||||
Ω(err).Should(Equal(io.EOF))
|
||||
Ω(n).Should(Equal(0))
|
||||
})
|
||||
|
||||
Context("after the buffer has been closed", func() {
|
||||
It("returns an error", func() {
|
||||
buffer := BufferWithBytes([]byte("abcde"))
|
||||
|
||||
buffer.Close()
|
||||
|
||||
dest := make([]byte, 3)
|
||||
n, err := buffer.Read(dest)
|
||||
Ω(err).Should(HaveOccurred())
|
||||
Ω(n).Should(Equal(0))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("detecting regular expressions", func() {
|
||||
It("should fire the appropriate channel when the passed in pattern matches, then close it", func(done Done) {
|
||||
go func() {
|
||||
time.Sleep(10 * time.Millisecond)
|
||||
buffer.Write([]byte("abcde"))
|
||||
}()
|
||||
|
||||
A := buffer.Detect("%s", "a.c")
|
||||
B := buffer.Detect("def")
|
||||
|
||||
var gotIt bool
|
||||
select {
|
||||
case gotIt = <-A:
|
||||
case <-B:
|
||||
Fail("should not have gotten here")
|
||||
}
|
||||
|
||||
Ω(gotIt).Should(BeTrue())
|
||||
Eventually(A).Should(BeClosed())
|
||||
|
||||
buffer.Write([]byte("f"))
|
||||
Eventually(B).Should(Receive())
|
||||
Eventually(B).Should(BeClosed())
|
||||
|
||||
close(done)
|
||||
})
|
||||
|
||||
It("should fast-forward the buffer upon detection", func(done Done) {
|
||||
buffer.Write([]byte("abcde"))
|
||||
<-buffer.Detect("abc")
|
||||
Ω(buffer).ShouldNot(Say("abc"))
|
||||
Ω(buffer).Should(Say("de"))
|
||||
close(done)
|
||||
})
|
||||
|
||||
It("should only fast-forward the buffer when the channel is read, and only if doing so would not rewind it", func(done Done) {
|
||||
buffer.Write([]byte("abcde"))
|
||||
A := buffer.Detect("abc")
|
||||
time.Sleep(20 * time.Millisecond) //give the goroutine a chance to detect and write to the channel
|
||||
Ω(buffer).Should(Say("abcd"))
|
||||
<-A
|
||||
Ω(buffer).ShouldNot(Say("d"))
|
||||
Ω(buffer).Should(Say("e"))
|
||||
Eventually(A).Should(BeClosed())
|
||||
close(done)
|
||||
})
|
||||
|
||||
It("should be possible to cancel a detection", func(done Done) {
|
||||
A := buffer.Detect("abc")
|
||||
B := buffer.Detect("def")
|
||||
buffer.CancelDetects()
|
||||
buffer.Write([]byte("abcdef"))
|
||||
Eventually(A).Should(BeClosed())
|
||||
Eventually(B).Should(BeClosed())
|
||||
|
||||
Ω(buffer).Should(Say("bcde"))
|
||||
<-buffer.Detect("f")
|
||||
close(done)
|
||||
})
|
||||
})
|
||||
|
||||
Describe("closing the buffer", func() {
|
||||
It("should error when further write attempts are made", func() {
|
||||
_, err := buffer.Write([]byte("abc"))
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
buffer.Close()
|
||||
|
||||
_, err = buffer.Write([]byte("def"))
|
||||
Ω(err).Should(HaveOccurred())
|
||||
|
||||
Ω(buffer.Contents()).Should(Equal([]byte("abc")))
|
||||
})
|
||||
|
||||
It("should be closed", func() {
|
||||
Ω(buffer.Closed()).Should(BeFalse())
|
||||
|
||||
buffer.Close()
|
||||
|
||||
Ω(buffer.Closed()).Should(BeTrue())
|
||||
})
|
||||
})
|
||||
})
|
||||
13
Godeps/_workspace/src/github.com/onsi/gomega/gbytes/gbuffer_suite_test.go
generated
vendored
@@ -1,13 +0,0 @@
|
||||
package gbytes_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestGbytes(t *testing.T) {
|
||||
RegisterFailHandler(Fail)
|
||||
RunSpecs(t, "Gbytes Suite")
|
||||
}
|
||||
163
Godeps/_workspace/src/github.com/onsi/gomega/gbytes/say_matcher_test.go
generated
vendored
@@ -1,163 +0,0 @@
|
||||
package gbytes_test
|
||||
|
||||
import (
|
||||
"time"
|
||||
. "github.com/onsi/gomega/gbytes"
|
||||
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
type speaker struct {
|
||||
buffer *Buffer
|
||||
}
|
||||
|
||||
func (s *speaker) Buffer() *Buffer {
|
||||
return s.buffer
|
||||
}
|
||||
|
||||
var _ = Describe("SayMatcher", func() {
|
||||
var buffer *Buffer
|
||||
|
||||
BeforeEach(func() {
|
||||
buffer = NewBuffer()
|
||||
buffer.Write([]byte("abc"))
|
||||
})
|
||||
|
||||
Context("when actual is not a gexec Buffer, or a BufferProvider", func() {
|
||||
It("should error", func() {
|
||||
failures := InterceptGomegaFailures(func() {
|
||||
Ω("foo").Should(Say("foo"))
|
||||
})
|
||||
Ω(failures[0]).Should(ContainSubstring("*gbytes.Buffer"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when a match is found", func() {
|
||||
It("should succeed", func() {
|
||||
Ω(buffer).Should(Say("abc"))
|
||||
})
|
||||
|
||||
It("should support printf-like formatting", func() {
|
||||
Ω(buffer).Should(Say("a%sc", "b"))
|
||||
})
|
||||
|
||||
It("should use a regular expression", func() {
|
||||
Ω(buffer).Should(Say("a.c"))
|
||||
})
|
||||
|
||||
It("should fastforward the buffer", func() {
|
||||
buffer.Write([]byte("def"))
|
||||
Ω(buffer).Should(Say("abcd"))
|
||||
Ω(buffer).Should(Say("ef"))
|
||||
Ω(buffer).ShouldNot(Say("[a-z]"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when no match is found", func() {
|
||||
It("should not error", func() {
|
||||
Ω(buffer).ShouldNot(Say("def"))
|
||||
})
|
||||
|
||||
Context("when the buffer is closed", func() {
|
||||
BeforeEach(func() {
|
||||
buffer.Close()
|
||||
})
|
||||
|
||||
It("should abort an eventually", func() {
|
||||
t := time.Now()
|
||||
failures := InterceptGomegaFailures(func() {
|
||||
Eventually(buffer).Should(Say("def"))
|
||||
})
|
||||
Eventually(buffer).ShouldNot(Say("def"))
|
||||
Ω(time.Since(t)).Should(BeNumerically("<", 500*time.Millisecond))
|
||||
Ω(failures).Should(HaveLen(1))
|
||||
|
||||
t = time.Now()
|
||||
Eventually(buffer).Should(Say("abc"))
|
||||
Ω(time.Since(t)).Should(BeNumerically("<", 500*time.Millisecond))
|
||||
})
|
||||
|
||||
It("should abort a consistently", func() {
|
||||
t := time.Now()
|
||||
Consistently(buffer, 2.0).ShouldNot(Say("def"))
|
||||
Ω(time.Since(t)).Should(BeNumerically("<", 500*time.Millisecond))
|
||||
})
|
||||
|
||||
It("should not error with a synchronous matcher", func() {
|
||||
Ω(buffer).ShouldNot(Say("def"))
|
||||
Ω(buffer).Should(Say("abc"))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Context("when a positive match fails", func() {
|
||||
It("should report where it got stuck", func() {
|
||||
Ω(buffer).Should(Say("abc"))
|
||||
buffer.Write([]byte("def"))
|
||||
failures := InterceptGomegaFailures(func() {
|
||||
Ω(buffer).Should(Say("abc"))
|
||||
})
|
||||
Ω(failures[0]).Should(ContainSubstring("Got stuck at:"))
|
||||
Ω(failures[0]).Should(ContainSubstring("def"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when a negative match fails", func() {
|
||||
It("should report where it got stuck", func() {
|
||||
failures := InterceptGomegaFailures(func() {
|
||||
Ω(buffer).ShouldNot(Say("abc"))
|
||||
})
|
||||
Ω(failures[0]).Should(ContainSubstring("Saw:"))
|
||||
Ω(failures[0]).Should(ContainSubstring("Which matches the unexpected:"))
|
||||
Ω(failures[0]).Should(ContainSubstring("abc"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when a match is not found", func() {
|
||||
It("should not fastforward the buffer", func() {
|
||||
Ω(buffer).ShouldNot(Say("def"))
|
||||
Ω(buffer).Should(Say("abc"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("a nice real-life example", func() {
|
||||
It("should behave well", func() {
|
||||
Ω(buffer).Should(Say("abc"))
|
||||
go func() {
|
||||
time.Sleep(10 * time.Millisecond)
|
||||
buffer.Write([]byte("def"))
|
||||
}()
|
||||
Ω(buffer).ShouldNot(Say("def"))
|
||||
Eventually(buffer).Should(Say("def"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when actual is a BufferProvider", func() {
|
||||
It("should use actual's buffer", func() {
|
||||
s := &speaker{
|
||||
buffer: NewBuffer(),
|
||||
}
|
||||
|
||||
Ω(s).ShouldNot(Say("abc"))
|
||||
|
||||
s.Buffer().Write([]byte("abc"))
|
||||
Ω(s).Should(Say("abc"))
|
||||
})
|
||||
|
||||
It("should abort an eventually", func() {
|
||||
s := &speaker{
|
||||
buffer: NewBuffer(),
|
||||
}
|
||||
|
||||
s.buffer.Close()
|
||||
|
||||
t := time.Now()
|
||||
failures := InterceptGomegaFailures(func() {
|
||||
Eventually(s).Should(Say("def"))
|
||||
})
|
||||
Ω(failures).Should(HaveLen(1))
|
||||
Ω(time.Since(t)).Should(BeNumerically("<", 500*time.Millisecond))
|
||||
})
|
||||
})
|
||||
})
|
||||
113
Godeps/_workspace/src/github.com/onsi/gomega/gexec/exit_matcher_test.go
generated
vendored
@@ -1,113 +0,0 @@
|
||||
package gexec_test
|
||||
|
||||
import (
|
||||
"os/exec"
|
||||
"time"
|
||||
. "github.com/onsi/gomega/gexec"
|
||||
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
type NeverExits struct{}
|
||||
|
||||
func (e NeverExits) ExitCode() int {
|
||||
return -1
|
||||
}
|
||||
|
||||
var _ = Describe("ExitMatcher", func() {
|
||||
var command *exec.Cmd
|
||||
var session *Session
|
||||
|
||||
BeforeEach(func() {
|
||||
var err error
|
||||
command = exec.Command(fireflyPath, "0")
|
||||
session, err = Start(command, nil, nil)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
})
|
||||
|
||||
Describe("when passed something that is an Exiter", func() {
|
||||
It("should act normally", func() {
|
||||
failures := InterceptGomegaFailures(func() {
|
||||
Ω(NeverExits{}).Should(Exit())
|
||||
})
|
||||
|
||||
Ω(failures[0]).Should(ContainSubstring("Expected process to exit. It did not."))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("when passed something that is not an Exiter", func() {
|
||||
It("should error", func() {
|
||||
failures := InterceptGomegaFailures(func() {
|
||||
Ω("aardvark").Should(Exit())
|
||||
})
|
||||
|
||||
Ω(failures[0]).Should(ContainSubstring("Exit must be passed a gexec.Exiter"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("with no exit code", func() {
|
||||
It("should say the right things when it fails", func() {
|
||||
Ω(session).ShouldNot(Exit())
|
||||
|
||||
failures := InterceptGomegaFailures(func() {
|
||||
Ω(session).Should(Exit())
|
||||
})
|
||||
|
||||
Ω(failures[0]).Should(ContainSubstring("Expected process to exit. It did not."))
|
||||
|
||||
Eventually(session).Should(Exit())
|
||||
|
||||
Ω(session).Should(Exit())
|
||||
|
||||
failures = InterceptGomegaFailures(func() {
|
||||
Ω(session).ShouldNot(Exit())
|
||||
})
|
||||
|
||||
Ω(failures[0]).Should(ContainSubstring("Expected process not to exit. It did."))
|
||||
})
|
||||
})
|
||||
|
||||
Context("with an exit code", func() {
|
||||
It("should say the right things when it fails", func() {
|
||||
Ω(session).ShouldNot(Exit(0))
|
||||
Ω(session).ShouldNot(Exit(1))
|
||||
|
||||
failures := InterceptGomegaFailures(func() {
|
||||
Ω(session).Should(Exit(0))
|
||||
})
|
||||
|
||||
Ω(failures[0]).Should(ContainSubstring("Expected process to exit. It did not."))
|
||||
|
||||
Eventually(session).Should(Exit(0))
|
||||
|
||||
Ω(session).Should(Exit(0))
|
||||
|
||||
failures = InterceptGomegaFailures(func() {
|
||||
Ω(session).Should(Exit(1))
|
||||
})
|
||||
|
||||
Ω(failures[0]).Should(ContainSubstring("to match exit code:"))
|
||||
|
||||
Ω(session).ShouldNot(Exit(1))
|
||||
|
||||
failures = InterceptGomegaFailures(func() {
|
||||
Ω(session).ShouldNot(Exit(0))
|
||||
})
|
||||
|
||||
Ω(failures[0]).Should(ContainSubstring("not to match exit code:"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("bailing out early", func() {
|
||||
It("should bail out early once the process exits", func() {
|
||||
t := time.Now()
|
||||
|
||||
failures := InterceptGomegaFailures(func() {
|
||||
Eventually(session).Should(Exit(1))
|
||||
})
|
||||
Ω(time.Since(t)).Should(BeNumerically("<=", 500*time.Millisecond))
|
||||
Ω(failures).Should(HaveLen(1))
|
||||
})
|
||||
})
|
||||
})
|
||||
26
Godeps/_workspace/src/github.com/onsi/gomega/gexec/gexec_suite_test.go
generated
vendored
@@ -1,26 +0,0 @@
|
||||
package gexec_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
"github.com/onsi/gomega/gexec"
|
||||
|
||||
"testing"
|
||||
)
|
||||
|
||||
var fireflyPath string
|
||||
|
||||
func TestGexec(t *testing.T) {
|
||||
BeforeSuite(func() {
|
||||
var err error
|
||||
fireflyPath, err = gexec.Build("./_fixture/firefly")
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
})
|
||||
|
||||
AfterSuite(func() {
|
||||
gexec.CleanupBuildArtifacts()
|
||||
})
|
||||
|
||||
RegisterFailHandler(Fail)
|
||||
RunSpecs(t, "Gexec Suite")
|
||||
}
|
||||
43
Godeps/_workspace/src/github.com/onsi/gomega/gexec/prefixed_writer_test.go
generated
vendored
@@ -1,43 +0,0 @@
|
||||
package gexec_test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
|
||||
. "github.com/onsi/gomega/gexec"
|
||||
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("PrefixedWriter", func() {
|
||||
var buffer *bytes.Buffer
|
||||
var writer *PrefixedWriter
|
||||
BeforeEach(func() {
|
||||
buffer = &bytes.Buffer{}
|
||||
writer = NewPrefixedWriter("[p]", buffer)
|
||||
})
|
||||
|
||||
It("should emit the prefix on newlines", func() {
|
||||
writer.Write([]byte("abc"))
|
||||
writer.Write([]byte("def\n"))
|
||||
writer.Write([]byte("hij\n"))
|
||||
writer.Write([]byte("\n\n"))
|
||||
writer.Write([]byte("klm\n\nnop"))
|
||||
writer.Write([]byte(""))
|
||||
writer.Write([]byte("qrs"))
|
||||
writer.Write([]byte("\ntuv\nwx"))
|
||||
writer.Write([]byte("yz\n\n"))
|
||||
|
||||
Ω(buffer.String()).Should(Equal(`[p]abcdef
|
||||
[p]hij
|
||||
[p]
|
||||
[p]
|
||||
[p]klm
|
||||
[p]
|
||||
[p]nopqrs
|
||||
[p]tuv
|
||||
[p]wxyz
|
||||
[p]
|
||||
`))
|
||||
})
|
||||
})
|
||||
2
Godeps/_workspace/src/github.com/onsi/gomega/gexec/session.go
generated
vendored
@@ -137,7 +137,7 @@ will wait for the command to exit then return the entirety of Out's contents.
Wait uses eventually under the hood and accepts the same timeout/polling intervals that eventually does.
*/
func (s *Session) Wait(timeout ...interface{}) *Session {
	Eventually(s, timeout...).Should(Exit())
	EventuallyWithOffset(1, s, timeout...).Should(Exit())
	return s
}
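A usage note, not part of the diff: because Wait defers to Eventually, its optional arguments are Eventually's timeout and polling interval. A minimal sketch, assuming it runs inside a Ginkgo spec with the gexec/gomega dot-imports used throughout this suite, reusing the fireflyPath fixture binary built in gexec_suite_test.go above:

command := exec.Command(fireflyPath)
session, err := Start(command, GinkgoWriter, GinkgoWriter)
Ω(err).ShouldNot(HaveOccurred())

// Wait blocks until the process exits; here it waits up to 5s, polling every 100ms,
// and any failure is reported at the caller's line thanks to EventuallyWithOffset.
session.Wait(5*time.Second, 100*time.Millisecond)
Ω(session.ExitCode()).Should(BeNumerically(">=", 0))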
177
Godeps/_workspace/src/github.com/onsi/gomega/gexec/session_test.go
generated
vendored
@@ -1,177 +0,0 @@
|
||||
package gexec_test
|
||||
|
||||
import (
|
||||
"os/exec"
|
||||
"syscall"
|
||||
"time"
|
||||
. "github.com/onsi/gomega/gbytes"
|
||||
. "github.com/onsi/gomega/gexec"
|
||||
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("Session", func() {
|
||||
var command *exec.Cmd
|
||||
var session *Session
|
||||
|
||||
var outWriter, errWriter *Buffer
|
||||
|
||||
BeforeEach(func() {
|
||||
outWriter = nil
|
||||
errWriter = nil
|
||||
})
|
||||
|
||||
JustBeforeEach(func() {
|
||||
command = exec.Command(fireflyPath)
|
||||
var err error
|
||||
session, err = Start(command, outWriter, errWriter)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
})
|
||||
|
||||
Context("running a command", func() {
|
||||
It("should start the process", func() {
|
||||
Ω(command.Process).ShouldNot(BeNil())
|
||||
})
|
||||
|
||||
It("should wrap the process's stdout and stderr with gbytes buffers", func(done Done) {
|
||||
Eventually(session.Out).Should(Say("We've done the impossible, and that makes us mighty"))
|
||||
Eventually(session.Err).Should(Say("Ah, curse your sudden but inevitable betrayal!"))
|
||||
defer session.Out.CancelDetects()
|
||||
|
||||
select {
|
||||
case <-session.Out.Detect("Can we maybe vote on the whole murdering people issue"):
|
||||
Eventually(session).Should(Exit(0))
|
||||
case <-session.Out.Detect("I swear by my pretty floral bonnet, I will end you."):
|
||||
Eventually(session).Should(Exit(1))
|
||||
case <-session.Out.Detect("My work's illegal, but at least it's honest."):
|
||||
Eventually(session).Should(Exit(2))
|
||||
}
|
||||
|
||||
close(done)
|
||||
})
|
||||
|
||||
It("should satisfy the gbytes.BufferProvider interface, passing Stdout", func() {
|
||||
Eventually(session).Should(Say("We've done the impossible, and that makes us mighty"))
|
||||
Eventually(session).Should(Exit())
|
||||
})
|
||||
})
|
||||
|
||||
Describe("providing the exit code", func() {
|
||||
It("should provide the app's exit code", func() {
|
||||
Ω(session.ExitCode()).Should(Equal(-1))
|
||||
|
||||
Eventually(session).Should(Exit())
|
||||
Ω(session.ExitCode()).Should(BeNumerically(">=", 0))
|
||||
Ω(session.ExitCode()).Should(BeNumerically("<", 3))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("wait", func() {
|
||||
It("should wait till the command exits", func() {
|
||||
Ω(session.ExitCode()).Should(Equal(-1))
|
||||
Ω(session.Wait().ExitCode()).Should(BeNumerically(">=", 0))
|
||||
Ω(session.Wait().ExitCode()).Should(BeNumerically("<", 3))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("exited", func() {
|
||||
It("should close when the command exits", func() {
|
||||
Eventually(session.Exited).Should(BeClosed())
|
||||
Ω(session.ExitCode()).ShouldNot(Equal(-1))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("kill", func() {
|
||||
It("should kill the command and wait for it to exit", func() {
|
||||
session, err := Start(exec.Command("sleep", "10000000"), GinkgoWriter, GinkgoWriter)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
session.Kill()
|
||||
Ω(session).ShouldNot(Exit(), "Should not exit immediately...")
|
||||
Eventually(session).Should(Exit(128 + 9))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("interrupt", func() {
|
||||
It("should interrupt the command", func() {
|
||||
session, err := Start(exec.Command("sleep", "10000000"), GinkgoWriter, GinkgoWriter)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
session.Interrupt()
|
||||
Ω(session).ShouldNot(Exit(), "Should not exit immediately...")
|
||||
Eventually(session).Should(Exit(128 + 2))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("terminate", func() {
|
||||
It("should terminate the command", func() {
|
||||
session, err := Start(exec.Command("sleep", "10000000"), GinkgoWriter, GinkgoWriter)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
session.Terminate()
|
||||
Ω(session).ShouldNot(Exit(), "Should not exit immediately...")
|
||||
Eventually(session).Should(Exit(128 + 15))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("signal", func() {
|
||||
It("should send the signal to the command", func() {
|
||||
session, err := Start(exec.Command("sleep", "10000000"), GinkgoWriter, GinkgoWriter)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
session.Signal(syscall.SIGABRT)
|
||||
Ω(session).ShouldNot(Exit(), "Should not exit immediately...")
|
||||
Eventually(session).Should(Exit(128 + 6))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the command exits", func() {
|
||||
It("should close the buffers", func() {
|
||||
Eventually(session).Should(Exit())
|
||||
|
||||
Ω(session.Out.Closed()).Should(BeTrue())
|
||||
Ω(session.Err.Closed()).Should(BeTrue())
|
||||
|
||||
Ω(session.Out).Should(Say("We've done the impossible, and that makes us mighty"))
|
||||
})
|
||||
|
||||
var So = It
|
||||
|
||||
So("this means that eventually should short circuit", func() {
|
||||
t := time.Now()
|
||||
failures := InterceptGomegaFailures(func() {
|
||||
Eventually(session).Should(Say("blah blah blah blah blah"))
|
||||
})
|
||||
Ω(time.Since(t)).Should(BeNumerically("<=", 500*time.Millisecond))
|
||||
Ω(failures).Should(HaveLen(1))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when wrapping out and err", func() {
|
||||
BeforeEach(func() {
|
||||
outWriter = NewBuffer()
|
||||
errWriter = NewBuffer()
|
||||
})
|
||||
|
||||
It("should route to both the provided writers and the gbytes buffers", func() {
|
||||
Eventually(session.Out).Should(Say("We've done the impossible, and that makes us mighty"))
|
||||
Eventually(session.Err).Should(Say("Ah, curse your sudden but inevitable betrayal!"))
|
||||
|
||||
Ω(outWriter.Contents()).Should(ContainSubstring("We've done the impossible, and that makes us mighty"))
|
||||
Ω(errWriter.Contents()).Should(ContainSubstring("Ah, curse your sudden but inevitable betrayal!"))
|
||||
|
||||
Eventually(session).Should(Exit())
|
||||
|
||||
Ω(outWriter.Contents()).Should(Equal(session.Out.Contents()))
|
||||
Ω(errWriter.Contents()).Should(Equal(session.Err.Contents()))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("when the command fails to start", func() {
|
||||
It("should return an error", func() {
|
||||
_, err := Start(exec.Command("agklsjdfas"), nil, nil)
|
||||
Ω(err).Should(HaveOccurred())
|
||||
})
|
||||
})
|
||||
})
|
||||
118
Godeps/_workspace/src/github.com/onsi/gomega/ghttp/handlers.go
generated
vendored
@@ -6,7 +6,10 @@ import (
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
	"reflect"

	"github.com/golang/protobuf/proto"
	. "github.com/onsi/gomega"
	"github.com/onsi/gomega/types"
)
@@ -36,7 +39,10 @@ func VerifyRequest(method string, path interface{}, rawQuery ...string) http.Han
			Ω(req.URL.Path).Should(Equal(path), "Path mismatch")
		}
		if len(rawQuery) > 0 {
			Ω(req.URL.RawQuery).Should(Equal(rawQuery[0]), "RawQuery mismatch")
			values, err := url.ParseQuery(rawQuery[0])
			Ω(err).ShouldNot(HaveOccurred(), "Expected RawQuery is malformed")

			Ω(req.URL.Query()).Should(Equal(values), "RawQuery mismatch")
		}
	}
}
@@ -84,6 +90,19 @@ func VerifyHeaderKV(key string, values ...string) http.HandlerFunc {
|
||||
return VerifyHeader(http.Header{key: values})
|
||||
}
|
||||
|
||||
//VerifyBody returns a handler that verifies that the body of the request matches the passed in byte array.
|
||||
//It does this using Equal().
|
||||
func VerifyBody(expectedBody []byte) http.HandlerFunc {
|
||||
return CombineHandlers(
|
||||
func(w http.ResponseWriter, req *http.Request) {
|
||||
body, err := ioutil.ReadAll(req.Body)
|
||||
req.Body.Close()
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(body).Should(Equal(expectedBody), "Body Mismatch")
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
//VerifyJSON returns a handler that verifies that the body of the request is a valid JSON representation
|
||||
//matching the passed in JSON string. It does this using Gomega's MatchJSON method
|
||||
//
|
||||
@@ -112,6 +131,53 @@ func VerifyJSONRepresenting(object interface{}) http.HandlerFunc {
|
||||
)
|
||||
}
|
||||
|
||||
//VerifyForm returns a handler that verifies a request contains the specified form values.
|
||||
//
|
||||
//The request must contain *all* of the specified values, but it is allowed to have additional
|
||||
//form values beyond the passed in set.
|
||||
func VerifyForm(values url.Values) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
err := r.ParseForm()
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
for key, vals := range values {
|
||||
Ω(r.Form[key]).Should(Equal(vals), "Form mismatch for key: %s", key)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//VerifyFormKV returns a handler that verifies a request contains a form key with the specified values.
|
||||
//
|
||||
//It is a convenience wrapper around `VerifyForm` that lets you avoid having to create a `url.Values` object.
|
||||
func VerifyFormKV(key string, values ...string) http.HandlerFunc {
|
||||
return VerifyForm(url.Values{key: values})
|
||||
}
|
||||
|
||||
//VerifyProtoRepresenting returns a handler that verifies that the body of the request is a valid protobuf
|
||||
//representation of the passed message.
|
||||
//
|
||||
//VerifyProtoRepresenting also verifies that the request's content type is application/x-protobuf
|
||||
func VerifyProtoRepresenting(expected proto.Message) http.HandlerFunc {
|
||||
return CombineHandlers(
|
||||
VerifyContentType("application/x-protobuf"),
|
||||
func(w http.ResponseWriter, req *http.Request) {
|
||||
body, err := ioutil.ReadAll(req.Body)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
req.Body.Close()
|
||||
|
||||
expectedType := reflect.TypeOf(expected)
|
||||
actualValuePtr := reflect.New(expectedType.Elem())
|
||||
|
||||
actual, ok := actualValuePtr.Interface().(proto.Message)
|
||||
Ω(ok).Should(BeTrue(), "Message value is not a proto.Message")
|
||||
|
||||
err = proto.Unmarshal(body, actual)
|
||||
Ω(err).ShouldNot(HaveOccurred(), "Failed to unmarshal protobuf")
|
||||
|
||||
Ω(actual).Should(Equal(expected), "ProtoBuf Mismatch")
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
func copyHeader(src http.Header, dst http.Header) {
|
||||
for key, value := range src {
|
||||
dst[key] = value
|
||||
@@ -179,7 +245,17 @@ Also, RespondWithJSONEncoded can be given an optional http.Header. The headers
|
||||
func RespondWithJSONEncoded(statusCode int, object interface{}, optionalHeader ...http.Header) http.HandlerFunc {
|
||||
data, err := json.Marshal(object)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
return RespondWith(statusCode, string(data), optionalHeader...)
|
||||
|
||||
var headers http.Header
|
||||
if len(optionalHeader) == 1 {
|
||||
headers = optionalHeader[0]
|
||||
} else {
|
||||
headers = make(http.Header)
|
||||
}
|
||||
if _, found := headers["Content-Type"]; !found {
|
||||
headers["Content-Type"] = []string{"application/json"}
|
||||
}
|
||||
return RespondWith(statusCode, string(data), headers)
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -192,14 +268,46 @@ objects.
|
||||
Also, RespondWithJSONEncodedPtr can be given an optional http.Header. The headers defined therein will be added to the response headers.
|
||||
Since the http.Header can be mutated after the fact you don't need to pass in a pointer.
|
||||
*/
|
||||
func RespondWithJSONEncodedPtr(statusCode *int, object *interface{}, optionalHeader ...http.Header) http.HandlerFunc {
|
||||
func RespondWithJSONEncodedPtr(statusCode *int, object interface{}, optionalHeader ...http.Header) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, req *http.Request) {
|
||||
data, err := json.Marshal(*object)
|
||||
data, err := json.Marshal(object)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
var headers http.Header
|
||||
if len(optionalHeader) == 1 {
|
||||
copyHeader(optionalHeader[0], w.Header())
|
||||
headers = optionalHeader[0]
|
||||
} else {
|
||||
headers = make(http.Header)
|
||||
}
|
||||
if _, found := headers["Content-Type"]; !found {
|
||||
headers["Content-Type"] = []string{"application/json"}
|
||||
}
|
||||
copyHeader(headers, w.Header())
|
||||
w.WriteHeader(*statusCode)
|
||||
w.Write(data)
|
||||
}
|
||||
}
|
||||
|
||||
//RespondWithProto returns a handler that responds to a request with the specified status code and a body
|
||||
//containing the protobuf serialization of the provided message.
|
||||
//
|
||||
//Also, RespondWithProto can be given an optional http.Header. The headers defined therein will be added to the response headers.
|
||||
func RespondWithProto(statusCode int, message proto.Message, optionalHeader ...http.Header) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, req *http.Request) {
|
||||
data, err := proto.Marshal(message)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
|
||||
var headers http.Header
|
||||
if len(optionalHeader) == 1 {
|
||||
headers = optionalHeader[0]
|
||||
} else {
|
||||
headers = make(http.Header)
|
||||
}
|
||||
if _, found := headers["Content-Type"]; !found {
|
||||
headers["Content-Type"] = []string{"application/x-protobuf"}
|
||||
}
|
||||
copyHeader(headers, w.Header())
|
||||
|
||||
w.WriteHeader(statusCode)
|
||||
w.Write(data)
|
||||
}
|
||||
}
|
||||
|
||||
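To make the new handlers above concrete, a minimal hedged sketch of wiring them into a ghttp test server. The route, form values, and response payload are invented for illustration; it assumes the usual net/http, net/url, ghttp, and gomega imports.

server := ghttp.NewServer()
defer server.Close()

server.AppendHandlers(ghttp.CombineHandlers(
	ghttp.VerifyRequest("POST", "/login"),
	// VerifyForm: the request must carry at least these form values.
	ghttp.VerifyForm(url.Values{"user": []string{"jane"}}),
	// RespondWithJSONEncoded now defaults Content-Type to application/json unless overridden.
	ghttp.RespondWithJSONEncoded(http.StatusOK, map[string]string{"token": "abc123"}),
))

resp, err := http.PostForm(server.URL()+"/login", url.Values{"user": []string{"jane"}})
Ω(err).ShouldNot(HaveOccurred())
Ω(resp.StatusCode).Should(Equal(http.StatusOK))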
3
Godeps/_workspace/src/github.com/onsi/gomega/ghttp/protobuf/protobuf.go
generated
vendored
Normal file
@@ -0,0 +1,3 @@
package protobuf

//go:generate protoc --go_out=. simple_message.proto
55
Godeps/_workspace/src/github.com/onsi/gomega/ghttp/protobuf/simple_message.pb.go
generated
vendored
Normal file
@@ -0,0 +1,55 @@
|
||||
// Code generated by protoc-gen-go.
|
||||
// source: simple_message.proto
|
||||
// DO NOT EDIT!
|
||||
|
||||
/*
|
||||
Package protobuf is a generated protocol buffer package.
|
||||
|
||||
It is generated from these files:
|
||||
simple_message.proto
|
||||
|
||||
It has these top-level messages:
|
||||
SimpleMessage
|
||||
*/
|
||||
package protobuf
|
||||
|
||||
import proto "github.com/golang/protobuf/proto"
|
||||
import fmt "fmt"
|
||||
import math "math"
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
var _ = proto.Marshal
|
||||
var _ = fmt.Errorf
|
||||
var _ = math.Inf
|
||||
|
||||
type SimpleMessage struct {
|
||||
Description *string `protobuf:"bytes,1,req,name=description" json:"description,omitempty"`
|
||||
Id *int32 `protobuf:"varint,2,req,name=id" json:"id,omitempty"`
|
||||
Metadata *string `protobuf:"bytes,3,opt,name=metadata" json:"metadata,omitempty"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
}
|
||||
|
||||
func (m *SimpleMessage) Reset() { *m = SimpleMessage{} }
|
||||
func (m *SimpleMessage) String() string { return proto.CompactTextString(m) }
|
||||
func (*SimpleMessage) ProtoMessage() {}
|
||||
|
||||
func (m *SimpleMessage) GetDescription() string {
|
||||
if m != nil && m.Description != nil {
|
||||
return *m.Description
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *SimpleMessage) GetId() int32 {
|
||||
if m != nil && m.Id != nil {
|
||||
return *m.Id
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *SimpleMessage) GetMetadata() string {
|
||||
if m != nil && m.Metadata != nil {
|
||||
return *m.Metadata
|
||||
}
|
||||
return ""
|
||||
}
|
||||
9
Godeps/_workspace/src/github.com/onsi/gomega/ghttp/protobuf/simple_message.proto
generated
vendored
Normal file
@@ -0,0 +1,9 @@
syntax = "proto2";

package protobuf;

message SimpleMessage {
  required string description = 1;
  required int32 id = 2;
  optional string metadata = 3;
}
36
Godeps/_workspace/src/github.com/onsi/gomega/ghttp/test_server.go
generated
vendored
@@ -106,11 +106,14 @@ A more comprehensive example is available at https://onsi.github.io/gomega/#_tes
package ghttp

import (
	"fmt"
	"io"
	"io/ioutil"
	"net/http"
	"net/http/httptest"
	"reflect"
	"regexp"
	"strings"
	"sync"

	. "github.com/onsi/gomega"
@@ -164,6 +167,11 @@ type Server struct {
	//Only applies if AllowUnhandledRequests is true
	UnhandledRequestStatusCode int

	//If provided, ghttp will log about each request received to the provided io.Writer
	//Defaults to nil
	//If you're using Ginkgo, set this to GinkgoWriter to get improved output during failures
	Writer io.Writer

	receivedRequests []*http.Request
	requestHandlers  []http.HandlerFunc
	routedHandlers   []routedHandler
@@ -208,9 +216,35 @@ func (s *Server) Close() {
func (s *Server) ServeHTTP(w http.ResponseWriter, req *http.Request) {
	s.writeLock.Lock()
	defer func() {
		recover()
		e := recover()
		if e != nil {
			w.WriteHeader(http.StatusInternalServerError)
		}

		//If the handler panics GHTTP will silently succeed. This is bad™.
		//To catch this case we need to fail the test if the handler has panicked.
		//However, if the handler is panicking because Ginkgo's causing it to panic (i.e. an assertion failed)
		//then we shouldn't double-report the error as this will confuse people.

		//So: step 1, if this is a Ginkgo panic - do nothing, Ginkgo's aware of the failure
		eAsString, ok := e.(string)
		if ok && strings.Contains(eAsString, "defer GinkgoRecover()") {
			return
		}

		//If we're here, we have to do step 2: assert that the error is nil. This assertion will
		//allow us to fail the test suite (note: we can't call Fail since Gomega is not allowed to import Ginkgo).
		//Since a failed assertion throws a panic, and we are likely in a goroutine, we need to defer within our defer!
		defer func() {
			recover()
		}()
		Ω(e).Should(BeNil(), "Handler Panicked")
	}()

	if s.Writer != nil {
		s.Writer.Write([]byte(fmt.Sprintf("GHTTP Received Request: %s - %s\n", req.Method, req.URL)))
	}

	s.receivedRequests = append(s.receivedRequests, req)
	if routedHandler, ok := s.handlerForRoute(req.Method, req.URL.Path); ok {
		s.writeLock.Unlock()
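A brief usage note, not part of the diff: the new Writer field above is the hook for the per-request logging mentioned in the changelog. A minimal sketch, assuming a Ginkgo suite with ghttp imported:

server := ghttp.NewServer()
// With Writer set, ghttp writes one line per incoming request; GinkgoWriter
// only replays that output when a spec fails, so passing runs stay quiet.
server.Writer = GinkgoWriter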
13
Godeps/_workspace/src/github.com/onsi/gomega/ghttp/test_server_suite_test.go
generated
vendored
@@ -1,13 +0,0 @@
|
||||
package ghttp_test
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestGHTTP(t *testing.T) {
|
||||
RegisterFailHandler(Fail)
|
||||
RunSpecs(t, "GHTTP Suite")
|
||||
}
|
||||
591
Godeps/_workspace/src/github.com/onsi/gomega/ghttp/test_server_test.go
generated
vendored
@@ -1,591 +0,0 @@
|
||||
package ghttp_test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"regexp"
|
||||
|
||||
. "github.com/onsi/ginkgo"
|
||||
. "github.com/onsi/gomega"
|
||||
. "github.com/onsi/gomega/ghttp"
|
||||
)
|
||||
|
||||
var _ = Describe("TestServer", func() {
|
||||
var (
|
||||
resp *http.Response
|
||||
err error
|
||||
s *Server
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
s = NewServer()
|
||||
})
|
||||
|
||||
AfterEach(func() {
|
||||
s.Close()
|
||||
})
|
||||
|
||||
Describe("closing client connections", func() {
|
||||
It("closes", func() {
|
||||
s.AppendHandlers(
|
||||
func(w http.ResponseWriter, req *http.Request) {
|
||||
w.Write([]byte("hello"))
|
||||
},
|
||||
func(w http.ResponseWriter, req *http.Request) {
|
||||
s.CloseClientConnections()
|
||||
},
|
||||
)
|
||||
|
||||
resp, err := http.Get(s.URL())
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(resp.StatusCode).Should(Equal(200))
|
||||
|
||||
body, err := ioutil.ReadAll(resp.Body)
|
||||
resp.Body.Close()
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(body).Should(Equal([]byte("hello")))
|
||||
|
||||
resp, err = http.Get(s.URL())
|
||||
Ω(err).Should(HaveOccurred())
|
||||
Ω(resp).Should(BeNil())
|
||||
})
|
||||
})
|
||||
|
||||
Describe("allowing unhandled requests", func() {
|
||||
Context("when true", func() {
|
||||
BeforeEach(func() {
|
||||
s.AllowUnhandledRequests = true
|
||||
s.UnhandledRequestStatusCode = http.StatusForbidden
|
||||
resp, err = http.Get(s.URL() + "/foo")
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
})
|
||||
|
||||
It("should allow unhandled requests and respond with the passed in status code", func() {
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(resp.StatusCode).Should(Equal(http.StatusForbidden))
|
||||
|
||||
data, err := ioutil.ReadAll(resp.Body)
|
||||
Ω(err).ShouldNot(HaveOccurred())
|
||||
Ω(data).Should(BeEmpty())
|
||||
})
|
||||
|
||||
It("should record the requests", func() {
|
||||
Ω(s.ReceivedRequests()).Should(HaveLen(1))
|
||||
Ω(s.ReceivedRequests()[0].URL.Path).Should(Equal("/foo"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when false", func() {
|
||||
It("should fail when attempting a request", func() {
|
||||
failures := InterceptGomegaFailures(func() {
|
||||
http.Get(s.URL() + "/foo")
|
||||
})
|
||||
|
||||
Ω(failures[0]).Should(ContainSubstring("Received Unhandled Request"))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Managing Handlers", func() {
var called []string
BeforeEach(func() {
called = []string{}
s.RouteToHandler("GET", "/routed", func(w http.ResponseWriter, req *http.Request) {
called = append(called, "r1")
})
s.RouteToHandler("POST", regexp.MustCompile(`/routed\d`), func(w http.ResponseWriter, req *http.Request) {
called = append(called, "r2")
})
s.AppendHandlers(func(w http.ResponseWriter, req *http.Request) {
called = append(called, "A")
}, func(w http.ResponseWriter, req *http.Request) {
called = append(called, "B")
})
})

It("should prefer routed handlers if there is a match", func() {
http.Get(s.URL() + "/routed")
http.Post(s.URL()+"/routed7", "application/json", nil)
http.Get(s.URL() + "/foo")
http.Get(s.URL() + "/routed")
http.Post(s.URL()+"/routed9", "application/json", nil)
http.Get(s.URL() + "/bar")

failures := InterceptGomegaFailures(func() {
http.Get(s.URL() + "/foo")
http.Get(s.URL() + "/routed/not/a/match")
http.Get(s.URL() + "/routed7")
http.Post(s.URL()+"/routed", "application/json", nil)
})

Ω(failures[0]).Should(ContainSubstring("Received Unhandled Request"))
Ω(failures).Should(HaveLen(4))

http.Post(s.URL()+"/routed3", "application/json", nil)

Ω(called).Should(Equal([]string{"r1", "r2", "A", "r1", "r2", "B", "r2"}))
})

It("should override routed handlers when reregistered", func() {
s.RouteToHandler("GET", "/routed", func(w http.ResponseWriter, req *http.Request) {
called = append(called, "r3")
})
s.RouteToHandler("POST", regexp.MustCompile(`/routed\d`), func(w http.ResponseWriter, req *http.Request) {
called = append(called, "r4")
})

http.Get(s.URL() + "/routed")
http.Post(s.URL()+"/routed7", "application/json", nil)

Ω(called).Should(Equal([]string{"r3", "r4"}))
})

It("should call the appended handlers, in order, as requests come in", func() {
http.Get(s.URL() + "/foo")
Ω(called).Should(Equal([]string{"A"}))

http.Get(s.URL() + "/foo")
Ω(called).Should(Equal([]string{"A", "B"}))

failures := InterceptGomegaFailures(func() {
http.Get(s.URL() + "/foo")
})

Ω(failures[0]).Should(ContainSubstring("Received Unhandled Request"))
})

Describe("Overwriting an existing handler", func() {
BeforeEach(func() {
s.SetHandler(0, func(w http.ResponseWriter, req *http.Request) {
called = append(called, "C")
})
})

It("should override the specified handler", func() {
http.Get(s.URL() + "/foo")
http.Get(s.URL() + "/foo")
Ω(called).Should(Equal([]string{"C", "B"}))
})
})

Describe("Getting an existing handler", func() {
It("should return the handler func", func() {
s.GetHandler(1)(nil, nil)
Ω(called).Should(Equal([]string{"B"}))
})
})

Describe("Wrapping an existing handler", func() {
BeforeEach(func() {
s.WrapHandler(0, func(w http.ResponseWriter, req *http.Request) {
called = append(called, "C")
})
})

It("should wrap the existing handler in a new handler", func() {
http.Get(s.URL() + "/foo")
http.Get(s.URL() + "/foo")
Ω(called).Should(Equal([]string{"A", "C", "B"}))
})
})
})

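// The specs below cover the packaged request handlers. CombineHandlers
// chains several of them onto one request; the Verify* handlers assert on
// method, path, raw query, content type, basic auth, headers and JSON
// bodies; the RespondWith* handlers write canned status codes, bodies and
// headers back to the client.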
Describe("Request Handlers", func() {
Describe("VerifyRequest", func() {
BeforeEach(func() {
s.AppendHandlers(VerifyRequest("GET", "/foo"))
})

It("should verify the method, path", func() {
resp, err = http.Get(s.URL() + "/foo?baz=bar")
Ω(err).ShouldNot(HaveOccurred())
})

It("should verify the method, path", func() {
failures := InterceptGomegaFailures(func() {
http.Get(s.URL() + "/foo2")
})
Ω(failures).Should(HaveLen(1))
})

It("should verify the method, path", func() {
failures := InterceptGomegaFailures(func() {
http.Post(s.URL()+"/foo", "application/json", nil)
})
Ω(failures).Should(HaveLen(1))
})

Context("when passed a rawQuery", func() {
It("should also be possible to verify the rawQuery", func() {
s.SetHandler(0, VerifyRequest("GET", "/foo", "baz=bar"))
resp, err = http.Get(s.URL() + "/foo?baz=bar")
Ω(err).ShouldNot(HaveOccurred())
})
})

Context("when passed a matcher for path", func() {
It("should apply the matcher", func() {
s.SetHandler(0, VerifyRequest("GET", MatchRegexp(`/foo/[a-f]*/3`)))
resp, err = http.Get(s.URL() + "/foo/abcdefa/3")
Ω(err).ShouldNot(HaveOccurred())
})
})
})

Describe("VerifyContentType", func() {
BeforeEach(func() {
s.AppendHandlers(CombineHandlers(
VerifyRequest("GET", "/foo"),
VerifyContentType("application/octet-stream"),
))
})

It("should verify the content type", func() {
req, err := http.NewRequest("GET", s.URL()+"/foo", nil)
Ω(err).ShouldNot(HaveOccurred())
req.Header.Set("Content-Type", "application/octet-stream")

resp, err = http.DefaultClient.Do(req)
Ω(err).ShouldNot(HaveOccurred())
})

It("should verify the content type", func() {
req, err := http.NewRequest("GET", s.URL()+"/foo", nil)
Ω(err).ShouldNot(HaveOccurred())
req.Header.Set("Content-Type", "application/json")

failures := InterceptGomegaFailures(func() {
http.DefaultClient.Do(req)
})
Ω(failures).Should(HaveLen(1))
})
})

Describe("Verify BasicAuth", func() {
BeforeEach(func() {
s.AppendHandlers(CombineHandlers(
VerifyRequest("GET", "/foo"),
VerifyBasicAuth("bob", "password"),
))
})

It("should verify basic auth", func() {
req, err := http.NewRequest("GET", s.URL()+"/foo", nil)
Ω(err).ShouldNot(HaveOccurred())
req.SetBasicAuth("bob", "password")

resp, err = http.DefaultClient.Do(req)
Ω(err).ShouldNot(HaveOccurred())
})

It("should verify basic auth", func() {
req, err := http.NewRequest("GET", s.URL()+"/foo", nil)
Ω(err).ShouldNot(HaveOccurred())
req.SetBasicAuth("bob", "bassword")

failures := InterceptGomegaFailures(func() {
http.DefaultClient.Do(req)
})
Ω(failures).Should(HaveLen(1))
})

It("should require basic auth header", func() {
req, err := http.NewRequest("GET", s.URL()+"/foo", nil)
Ω(err).ShouldNot(HaveOccurred())

failures := InterceptGomegaFailures(func() {
http.DefaultClient.Do(req)
})
Ω(failures).Should(HaveLen(1))
})
})

Describe("VerifyHeader", func() {
BeforeEach(func() {
s.AppendHandlers(CombineHandlers(
VerifyRequest("GET", "/foo"),
VerifyHeader(http.Header{
"accept": []string{"jpeg", "png"},
"cache-control": []string{"omicron"},
"Return-Path": []string{"hobbiton"},
}),
))
})

It("should verify the headers", func() {
req, err := http.NewRequest("GET", s.URL()+"/foo", nil)
Ω(err).ShouldNot(HaveOccurred())
req.Header.Add("Accept", "jpeg")
req.Header.Add("Accept", "png")
req.Header.Add("Cache-Control", "omicron")
req.Header.Add("return-path", "hobbiton")

resp, err = http.DefaultClient.Do(req)
Ω(err).ShouldNot(HaveOccurred())
})

It("should verify the headers", func() {
req, err := http.NewRequest("GET", s.URL()+"/foo", nil)
Ω(err).ShouldNot(HaveOccurred())
req.Header.Add("Schmaccept", "jpeg")
req.Header.Add("Schmaccept", "png")
req.Header.Add("Cache-Control", "omicron")
req.Header.Add("return-path", "hobbiton")

failures := InterceptGomegaFailures(func() {
http.DefaultClient.Do(req)
})
Ω(failures).Should(HaveLen(1))
})
})

Describe("VerifyHeaderKV", func() {
BeforeEach(func() {
s.AppendHandlers(CombineHandlers(
VerifyRequest("GET", "/foo"),
VerifyHeaderKV("accept", "jpeg", "png"),
VerifyHeaderKV("cache-control", "omicron"),
VerifyHeaderKV("Return-Path", "hobbiton"),
))
})

It("should verify the headers", func() {
req, err := http.NewRequest("GET", s.URL()+"/foo", nil)
Ω(err).ShouldNot(HaveOccurred())
req.Header.Add("Accept", "jpeg")
req.Header.Add("Accept", "png")
req.Header.Add("Cache-Control", "omicron")
req.Header.Add("return-path", "hobbiton")

resp, err = http.DefaultClient.Do(req)
Ω(err).ShouldNot(HaveOccurred())
})

It("should verify the headers", func() {
req, err := http.NewRequest("GET", s.URL()+"/foo", nil)
Ω(err).ShouldNot(HaveOccurred())
req.Header.Add("Accept", "jpeg")
req.Header.Add("Cache-Control", "omicron")
req.Header.Add("return-path", "hobbiton")

failures := InterceptGomegaFailures(func() {
http.DefaultClient.Do(req)
})
Ω(failures).Should(HaveLen(1))
})
})

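// VerifyJSON and VerifyJSONRepresenting both check the request body as
// JSON (key order does not matter) and also require an "application/json"
// Content-Type, as the failing specs below show.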
Describe("VerifyJSON", func() {
BeforeEach(func() {
s.AppendHandlers(CombineHandlers(
VerifyRequest("POST", "/foo"),
VerifyJSON(`{"a":3, "b":2}`),
))
})

It("should verify the json body and the content type", func() {
resp, err = http.Post(s.URL()+"/foo", "application/json", bytes.NewReader([]byte(`{"b":2, "a":3}`)))
Ω(err).ShouldNot(HaveOccurred())
})

It("should verify the json body and the content type", func() {
failures := InterceptGomegaFailures(func() {
http.Post(s.URL()+"/foo", "application/json", bytes.NewReader([]byte(`{"b":2, "a":4}`)))
})
Ω(failures).Should(HaveLen(1))
})

It("should verify the json body and the content type", func() {
failures := InterceptGomegaFailures(func() {
http.Post(s.URL()+"/foo", "application/not-json", bytes.NewReader([]byte(`{"b":2, "a":3}`)))
})
Ω(failures).Should(HaveLen(1))
})
})

Describe("VerifyJSONRepresenting", func() {
BeforeEach(func() {
s.AppendHandlers(CombineHandlers(
VerifyRequest("POST", "/foo"),
VerifyJSONRepresenting([]int{1, 3, 5}),
))
})

It("should verify the json body and the content type", func() {
resp, err = http.Post(s.URL()+"/foo", "application/json", bytes.NewReader([]byte(`[1,3,5]`)))
Ω(err).ShouldNot(HaveOccurred())
})

It("should verify the json body and the content type", func() {
failures := InterceptGomegaFailures(func() {
http.Post(s.URL()+"/foo", "application/json", bytes.NewReader([]byte(`[1,3]`)))
})
Ω(failures).Should(HaveLen(1))
})
})

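// RespondWith accepts a status code, a string or []byte body, and an
// optional http.Header to copy onto the response, as the two contexts
// below demonstrate.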
Describe("RespondWith", func() {
Context("without headers", func() {
BeforeEach(func() {
s.AppendHandlers(CombineHandlers(
VerifyRequest("POST", "/foo"),
RespondWith(http.StatusCreated, "sweet"),
), CombineHandlers(
VerifyRequest("POST", "/foo"),
RespondWith(http.StatusOK, []byte("sour")),
))
})

It("should return the response", func() {
resp, err = http.Post(s.URL()+"/foo", "application/json", nil)
Ω(err).ShouldNot(HaveOccurred())

Ω(resp.StatusCode).Should(Equal(http.StatusCreated))

body, err := ioutil.ReadAll(resp.Body)
Ω(err).ShouldNot(HaveOccurred())
Ω(body).Should(Equal([]byte("sweet")))

resp, err = http.Post(s.URL()+"/foo", "application/json", nil)
Ω(err).ShouldNot(HaveOccurred())

Ω(resp.StatusCode).Should(Equal(http.StatusOK))

body, err = ioutil.ReadAll(resp.Body)
Ω(err).ShouldNot(HaveOccurred())
Ω(body).Should(Equal([]byte("sour")))
})
})

Context("with headers", func() {
BeforeEach(func() {
s.AppendHandlers(CombineHandlers(
VerifyRequest("POST", "/foo"),
RespondWith(http.StatusCreated, "sweet", http.Header{"X-Custom-Header": []string{"my header"}}),
))
})

It("should return the headers too", func() {
resp, err = http.Post(s.URL()+"/foo", "application/json", nil)
Ω(err).ShouldNot(HaveOccurred())

Ω(resp.StatusCode).Should(Equal(http.StatusCreated))
Ω(ioutil.ReadAll(resp.Body)).Should(Equal([]byte("sweet")))
Ω(resp.Header.Get("X-Custom-Header")).Should(Equal("my header"))
})
})
})

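// The pointer-based variants below (RespondWithPtr, RespondWithJSONEncodedPtr)
// dereference their arguments when the request arrives, so a spec can register
// the handler in BeforeEach and still adjust the status code and body inside
// the It block.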
Describe("RespondWithPtr", func() {
var code int
var byteBody []byte
var stringBody string
BeforeEach(func() {
code = http.StatusOK
byteBody = []byte("sweet")
stringBody = "sour"

s.AppendHandlers(CombineHandlers(
VerifyRequest("POST", "/foo"),
RespondWithPtr(&code, &byteBody),
), CombineHandlers(
VerifyRequest("POST", "/foo"),
RespondWithPtr(&code, &stringBody),
))
})

It("should return the response", func() {
code = http.StatusCreated
byteBody = []byte("tasty")
stringBody = "treat"

resp, err = http.Post(s.URL()+"/foo", "application/json", nil)
Ω(err).ShouldNot(HaveOccurred())

Ω(resp.StatusCode).Should(Equal(http.StatusCreated))

body, err := ioutil.ReadAll(resp.Body)
Ω(err).ShouldNot(HaveOccurred())
Ω(body).Should(Equal([]byte("tasty")))

resp, err = http.Post(s.URL()+"/foo", "application/json", nil)
Ω(err).ShouldNot(HaveOccurred())

Ω(resp.StatusCode).Should(Equal(http.StatusCreated))

body, err = ioutil.ReadAll(resp.Body)
Ω(err).ShouldNot(HaveOccurred())
Ω(body).Should(Equal([]byte("treat")))
})

Context("when passed a nil body", func() {
BeforeEach(func() {
s.SetHandler(0, CombineHandlers(
VerifyRequest("POST", "/foo"),
RespondWithPtr(&code, nil),
))
})

It("should return an empty body and not explode", func() {
resp, err = http.Post(s.URL()+"/foo", "application/json", nil)

Ω(err).ShouldNot(HaveOccurred())
Ω(resp.StatusCode).Should(Equal(http.StatusOK))
body, err := ioutil.ReadAll(resp.Body)
Ω(err).ShouldNot(HaveOccurred())
Ω(body).Should(BeEmpty())

Ω(s.ReceivedRequests()).Should(HaveLen(1))
})
})
})

Describe("RespondWithJSON", func() {
BeforeEach(func() {
s.AppendHandlers(CombineHandlers(
VerifyRequest("POST", "/foo"),
RespondWithJSONEncoded(http.StatusCreated, []int{1, 2, 3}),
))
})

It("should return the response", func() {
resp, err = http.Post(s.URL()+"/foo", "application/json", nil)
Ω(err).ShouldNot(HaveOccurred())

Ω(resp.StatusCode).Should(Equal(http.StatusCreated))

body, err := ioutil.ReadAll(resp.Body)
Ω(err).ShouldNot(HaveOccurred())
Ω(body).Should(MatchJSON("[1,2,3]"))
})
})

Describe("RespondWithJSONPtr", func() {
var code int
var object interface{}
BeforeEach(func() {
code = http.StatusOK
object = []int{1, 2, 3}

s.AppendHandlers(CombineHandlers(
VerifyRequest("POST", "/foo"),
RespondWithJSONEncodedPtr(&code, &object),
))
})

It("should return the response", func() {
code = http.StatusCreated
object = []int{4, 5, 6}
resp, err = http.Post(s.URL()+"/foo", "application/json", nil)
Ω(err).ShouldNot(HaveOccurred())

Ω(resp.StatusCode).Should(Equal(http.StatusCreated))

body, err := ioutil.ReadAll(resp.Body)
Ω(err).ShouldNot(HaveOccurred())
Ω(body).Should(MatchJSON("[4,5,6]"))
})
})
})
})
13
Godeps/_workspace/src/github.com/onsi/gomega/internal/assertion/assertion_suite_test.go
generated
vendored
@@ -1,13 +0,0 @@
package assertion_test

import (
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"

"testing"
)

func TestAssertion(t *testing.T) {
RegisterFailHandler(Fail)
RunSpecs(t, "Assertion Suite")
}
252
Godeps/_workspace/src/github.com/onsi/gomega/internal/assertion/assertion_test.go
generated
vendored
@@ -1,252 +0,0 @@
package assertion_test

import (
"errors"

. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
. "github.com/onsi/gomega/internal/assertion"
"github.com/onsi/gomega/internal/fakematcher"
)

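// These specs drive Assertion through a fakematcher and a stubbed fail
// handler, checking that Should/ShouldNot/To/ToNot/NotTo forward the actual
// value to the matcher, report failures (with optional, printf-style
// descriptions) through the registered handler, and return false whenever
// the matcher errors.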
var _ = Describe("Assertion", func() {
var (
a *Assertion
failureMessage string
failureCallerSkip int
matcher *fakematcher.FakeMatcher
)

input := "The thing I'm testing"

var fakeFailHandler = func(message string, callerSkip ...int) {
failureMessage = message
if len(callerSkip) == 1 {
failureCallerSkip = callerSkip[0]
}
}

BeforeEach(func() {
matcher = &fakematcher.FakeMatcher{}
failureMessage = ""
failureCallerSkip = 0
a = New(input, fakeFailHandler, 1)
})

Context("when called", func() {
It("should pass the provided input value to the matcher", func() {
a.Should(matcher)

Ω(matcher.ReceivedActual).Should(Equal(input))
matcher.ReceivedActual = ""

a.ShouldNot(matcher)

Ω(matcher.ReceivedActual).Should(Equal(input))
matcher.ReceivedActual = ""

a.To(matcher)

Ω(matcher.ReceivedActual).Should(Equal(input))
matcher.ReceivedActual = ""

a.ToNot(matcher)

Ω(matcher.ReceivedActual).Should(Equal(input))
matcher.ReceivedActual = ""

a.NotTo(matcher)

Ω(matcher.ReceivedActual).Should(Equal(input))
})
})

Context("when the matcher succeeds", func() {
BeforeEach(func() {
matcher.MatchesToReturn = true
matcher.ErrToReturn = nil
})

Context("and a positive assertion is being made", func() {
It("should not call the failure callback", func() {
a.Should(matcher)
Ω(failureMessage).Should(Equal(""))
})

It("should be true", func() {
Ω(a.Should(matcher)).Should(BeTrue())
})
})

Context("and a negative assertion is being made", func() {
It("should call the failure callback", func() {
a.ShouldNot(matcher)
Ω(failureMessage).Should(Equal("negative: The thing I'm testing"))
Ω(failureCallerSkip).Should(Equal(3))
})

It("should be false", func() {
Ω(a.ShouldNot(matcher)).Should(BeFalse())
})
})
})

Context("when the matcher fails", func() {
BeforeEach(func() {
matcher.MatchesToReturn = false
matcher.ErrToReturn = nil
})

Context("and a positive assertion is being made", func() {
It("should call the failure callback", func() {
a.Should(matcher)
Ω(failureMessage).Should(Equal("positive: The thing I'm testing"))
Ω(failureCallerSkip).Should(Equal(3))
})

It("should be false", func() {
Ω(a.Should(matcher)).Should(BeFalse())
})
})

Context("and a negative assertion is being made", func() {
It("should not call the failure callback", func() {
a.ShouldNot(matcher)
Ω(failureMessage).Should(Equal(""))
})

It("should be true", func() {
Ω(a.ShouldNot(matcher)).Should(BeTrue())
})
})
})

Context("When reporting a failure", func() {
BeforeEach(func() {
matcher.MatchesToReturn = false
matcher.ErrToReturn = nil
})

Context("and there is an optional description", func() {
It("should append the description to the failure message", func() {
a.Should(matcher, "A description")
Ω(failureMessage).Should(Equal("A description\npositive: The thing I'm testing"))
Ω(failureCallerSkip).Should(Equal(3))
})
})

Context("and there are multiple arguments to the optional description", func() {
It("should append the formatted description to the failure message", func() {
a.Should(matcher, "A description of [%d]", 3)
Ω(failureMessage).Should(Equal("A description of [3]\npositive: The thing I'm testing"))
Ω(failureCallerSkip).Should(Equal(3))
})
})
})

Context("When the matcher returns an error", func() {
BeforeEach(func() {
matcher.ErrToReturn = errors.New("Kaboom!")
})

Context("and a positive assertion is being made", func() {
It("should call the failure callback", func() {
matcher.MatchesToReturn = true
a.Should(matcher)
Ω(failureMessage).Should(Equal("Kaboom!"))
Ω(failureCallerSkip).Should(Equal(3))
})
})

Context("and a negative assertion is being made", func() {
It("should call the failure callback", func() {
matcher.MatchesToReturn = false
a.ShouldNot(matcher)
Ω(failureMessage).Should(Equal("Kaboom!"))
Ω(failureCallerSkip).Should(Equal(3))
})
})

It("should always be false", func() {
Ω(a.Should(matcher)).Should(BeFalse())
Ω(a.ShouldNot(matcher)).Should(BeFalse())
})
})

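// When the value under test comes from a multi-return call, the extra
// return values are passed to New as additional parameters: if they are
// all nil or zero the assertion proceeds, otherwise it fails before the
// matcher is ever consulted, as the contexts below verify.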
Context("when there are extra parameters", func() {
It("(a simple example)", func() {
Ω(func() (string, int, error) {
return "foo", 0, nil
}()).Should(Equal("foo"))
})

Context("when the parameters are all nil or zero", func() {
It("should invoke the matcher", func() {
matcher.MatchesToReturn = true
matcher.ErrToReturn = nil

var typedNil []string
a = New(input, fakeFailHandler, 1, 0, nil, typedNil)

result := a.Should(matcher)
Ω(result).Should(BeTrue())
Ω(matcher.ReceivedActual).Should(Equal(input))

Ω(failureMessage).Should(BeZero())
})
})

Context("when any of the parameters are not nil or zero", func() {
It("should call the failure callback", func() {
matcher.MatchesToReturn = false
matcher.ErrToReturn = nil

a = New(input, fakeFailHandler, 1, errors.New("foo"))
result := a.Should(matcher)
Ω(result).Should(BeFalse())
Ω(matcher.ReceivedActual).Should(BeZero(), "The matcher doesn't even get called")
Ω(failureMessage).Should(ContainSubstring("foo"))
failureMessage = ""

a = New(input, fakeFailHandler, 1, nil, 1)
result = a.ShouldNot(matcher)
Ω(result).Should(BeFalse())
Ω(failureMessage).Should(ContainSubstring("1"))
failureMessage = ""

a = New(input, fakeFailHandler, 1, nil, 0, []string{"foo"})
result = a.To(matcher)
Ω(result).Should(BeFalse())
Ω(failureMessage).Should(ContainSubstring("foo"))
failureMessage = ""

a = New(input, fakeFailHandler, 1, nil, 0, []string{"foo"})
result = a.ToNot(matcher)
Ω(result).Should(BeFalse())
Ω(failureMessage).Should(ContainSubstring("foo"))
failureMessage = ""

a = New(input, fakeFailHandler, 1, nil, 0, []string{"foo"})
result = a.NotTo(matcher)
Ω(result).Should(BeFalse())
Ω(failureMessage).Should(ContainSubstring("foo"))
Ω(failureCallerSkip).Should(Equal(3))
})
})
})

Context("Making an assertion without a registered fail handler", func() {
It("should panic", func() {
defer func() {
e := recover()
RegisterFailHandler(Fail)
if e == nil {
Fail("expected a panic to have occurred")
}
}()

RegisterFailHandler(nil)
Ω(true).Should(BeTrue())
})
})
})
12
Godeps/_workspace/src/github.com/onsi/gomega/internal/asyncassertion/async_assertion.go
generated
vendored
@@ -6,6 +6,7 @@ import (
"reflect"
"time"

"github.com/onsi/gomega/internal/oraclematcher"
"github.com/onsi/gomega/types"
)

@@ -86,21 +87,12 @@ func (assertion *AsyncAssertion) pollActual() (interface{}, error) {
return assertion.actualInput, nil
}

type oracleMatcher interface {
MatchMayChangeInTheFuture(actual interface{}) bool
}

func (assertion *AsyncAssertion) matcherMayChange(matcher types.GomegaMatcher, value interface{}) bool {
if assertion.actualInputIsAFunction() {
return true
}

oracleMatcher, ok := matcher.(oracleMatcher)
if !ok {
return true
}

return oracleMatcher.MatchMayChangeInTheFuture(value)
return oraclematcher.MatchMayChangeInTheFuture(matcher, value)
}

func (assertion *AsyncAssertion) match(matcher types.GomegaMatcher, desiredMatch bool, optionalDescription ...interface{}) bool {