text stringlengths 93 16.4k | id stringlengths 20 40 | metadata dict | input_ids listlengths 45 2.05k | attention_mask listlengths 45 2.05k | complexity int64 1 9 |
|---|---|---|---|---|---|
func TestConfigSourceDirectory(t *testing.T) {
newTestConfig("source_directory: ./build", func(c Config, seq sequence) {
c.loadSourceDirectory("/Users/md/src/static/static.yml", seq)
assert.Equal(t, "/Users/md/src/static/build", c.SourceDirectory)
})
} | explode_data.jsonl/8656 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 91
} | [
2830,
3393,
2648,
3608,
9310,
1155,
353,
8840,
836,
8,
341,
8638,
2271,
2648,
445,
2427,
14846,
25,
12991,
5834,
497,
2915,
1337,
5532,
11,
12981,
8500,
8,
341,
197,
1444,
5104,
3608,
9310,
4283,
7137,
74823,
13437,
36643,
36643,
33936,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestTypeConvIssue804(t *testing.T) {
gopClTest(t, `
c := make(chan int)
d := (chan<- int)(c)
e := (<-chan int)(c)
f := (*int)(nil)
a := c == d
b := c == e
`, `package main
func main() {
c := make(chan int)
d := (chan<- int)(c)
e := (<-chan int)(c)
f := (*int)(nil)
a := c == d
b := c == e
}
`)
} | explode_data.jsonl/73576 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 147
} | [
2830,
3393,
929,
34892,
42006,
23,
15,
19,
1155,
353,
8840,
836,
8,
341,
3174,
453,
5066,
2271,
1155,
11,
22074,
66,
1669,
1281,
35190,
526,
340,
67,
1669,
320,
5658,
45342,
526,
2376,
66,
340,
68,
1669,
22438,
12,
5658,
526,
2376,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestLinkRouteExists(t *testing.T) {
mockNetLinkOps := new(mocks.NetLinkOps)
mockLink := new(netlink_mocks.Link)
// below is defined in net_linux.go
netLinkOps = mockNetLinkOps
tests := []struct {
desc string
inputLink netlink.Link
inputGwIP net.IP
inputSubnet *net.IPNet
errExp bool
outBoolFlag bool
onRetArgsNetLinkLibOpers []ovntest.TestifyMockHelper
onRetArgsLinkIfaceOpers []ovntest.TestifyMockHelper
}{
{
desc: "tests code path when RouteListFiltered() returns error",
inputLink: mockLink,
inputGwIP: ovntest.MustParseIP("192.168.0.1"),
inputSubnet: ovntest.MustParseIPNet("192.168.0.0/24"),
errExp: true,
outBoolFlag: false,
onRetArgsNetLinkLibOpers: []ovntest.TestifyMockHelper{
{OnCallMethodName: "RouteListFiltered", OnCallMethodArgType: []string{"int", "*netlink.Route", "uint64"}, RetArgList: []interface{}{[]netlink.Route{}, fmt.Errorf("mock error")}},
},
onRetArgsLinkIfaceOpers: []ovntest.TestifyMockHelper{
{OnCallMethodName: "Attrs", OnCallMethodArgType: []string{}, RetArgList: []interface{}{&netlink.LinkAttrs{Name: "testIfaceName", Index: 1}}},
},
},
{
desc: "tests code path when RouteListFiltered() returns empty routes list",
inputLink: mockLink,
inputGwIP: ovntest.MustParseIP("192.168.0.1"),
inputSubnet: ovntest.MustParseIPNet("192.168.0.0/24"),
outBoolFlag: false,
onRetArgsNetLinkLibOpers: []ovntest.TestifyMockHelper{
{OnCallMethodName: "RouteListFiltered", OnCallMethodArgType: []string{"int", "*netlink.Route", "uint64"}, RetArgList: []interface{}{[]netlink.Route{}, nil}},
},
onRetArgsLinkIfaceOpers: []ovntest.TestifyMockHelper{
{OnCallMethodName: "Attrs", OnCallMethodArgType: []string{}, RetArgList: []interface{}{&netlink.LinkAttrs{Name: "testIfaceName", Index: 1}}},
},
},
{
desc: "gateway IP input is nil",
inputLink: mockLink,
inputSubnet: ovntest.MustParseIPNet("192.168.0.0/24"),
onRetArgsNetLinkLibOpers: []ovntest.TestifyMockHelper{
{OnCallMethodName: "RouteListFiltered", OnCallMethodArgType: []string{"int", "*netlink.Route", "uint64"}, RetArgList: []interface{}{[]netlink.Route{}, nil}},
},
onRetArgsLinkIfaceOpers: []ovntest.TestifyMockHelper{
{OnCallMethodName: "Attrs", OnCallMethodArgType: []string{}, RetArgList: []interface{}{&netlink.LinkAttrs{Name: "testIfaceName", Index: 1}}},
},
},
{
desc: "tests code path where route GW IP DOES NOT MATCH with input GW IP",
inputLink: mockLink,
inputGwIP: ovntest.MustParseIP("192.168.0.1"),
inputSubnet: ovntest.MustParseIPNet("192.168.0.0/24"),
onRetArgsNetLinkLibOpers: []ovntest.TestifyMockHelper{
{OnCallMethodName: "RouteListFiltered", OnCallMethodArgType: []string{"int", "*netlink.Route", "uint64"}, RetArgList: []interface{}{[]netlink.Route{
{Gw: ovntest.MustParseIP("192.168.1.1")},
}, nil}},
},
onRetArgsLinkIfaceOpers: []ovntest.TestifyMockHelper{
{OnCallMethodName: "Attrs", OnCallMethodArgType: []string{}, RetArgList: []interface{}{&netlink.LinkAttrs{Name: "testIfaceName", Index: 1}}},
},
},
{
desc: "tests code path where route GW IP MATCHES with input GW IP",
inputLink: mockLink,
inputGwIP: ovntest.MustParseIP("192.168.0.1"),
inputSubnet: ovntest.MustParseIPNet("192.168.0.0/24"),
outBoolFlag: true,
onRetArgsNetLinkLibOpers: []ovntest.TestifyMockHelper{
{OnCallMethodName: "RouteListFiltered", OnCallMethodArgType: []string{"int", "*netlink.Route", "uint64"}, RetArgList: []interface{}{[]netlink.Route{
{Gw: ovntest.MustParseIP("192.168.0.1")},
}, nil}},
},
onRetArgsLinkIfaceOpers: []ovntest.TestifyMockHelper{
{OnCallMethodName: "Attrs", OnCallMethodArgType: []string{}, RetArgList: []interface{}{&netlink.LinkAttrs{Name: "testIfaceName", Index: 1}}},
},
},
}
for i, tc := range tests {
t.Run(fmt.Sprintf("%d:%s", i, tc.desc), func(t *testing.T) {
ovntest.ProcessMockFnList(&mockNetLinkOps.Mock, tc.onRetArgsNetLinkLibOpers)
ovntest.ProcessMockFnList(&mockLink.Mock, tc.onRetArgsLinkIfaceOpers)
flag, err := LinkRouteExists(tc.inputLink, tc.inputGwIP, tc.inputSubnet)
t.Log(flag, err)
if tc.errExp {
assert.Error(t, err)
} else {
assert.Nil(t, err)
}
if tc.outBoolFlag {
assert.True(t, flag)
}
mockNetLinkOps.AssertExpectations(t)
mockLink.AssertExpectations(t)
})
}
} | explode_data.jsonl/30958 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1997
} | [
2830,
3393,
3939,
4899,
15575,
1155,
353,
8840,
836,
8,
341,
77333,
6954,
3939,
38904,
1669,
501,
1255,
25183,
16993,
3939,
38904,
340,
77333,
3939,
1669,
501,
30723,
2080,
717,
25183,
22534,
340,
197,
322,
3685,
374,
4512,
304,
4179,
7... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestBuilderFeeGranter(t *testing.T) {
// keys and addresses
_, _, addr1 := testdata.KeyTestPubAddr()
// msg and signatures
msg1 := testdata.NewTestMsg(addr1, addr2)
feeAmount := testdata.NewTestFeeAmount()
msgs := []sdk.Msg{msg1}
txBuilder := newBuilder(nil)
err := txBuilder.SetMsgs(msgs...)
require.NoError(t, err)
txBuilder.SetGasLimit(200000)
txBuilder.SetFeeAmount(feeAmount)
require.Empty(t, txBuilder.GetTx().FeeGranter())
// set fee granter
txBuilder.SetFeeGranter(addr1)
require.Equal(t, addr1, txBuilder.GetTx().FeeGranter())
} | explode_data.jsonl/2446 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 219
} | [
2830,
3393,
3297,
41941,
6464,
80030,
1155,
353,
8840,
836,
8,
341,
197,
322,
6894,
323,
14230,
198,
197,
6878,
8358,
10789,
16,
1669,
1273,
691,
9610,
2271,
29162,
13986,
2822,
197,
322,
3750,
323,
32628,
198,
21169,
16,
1669,
1273,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestOutputExtensionRemappingDir(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
console.log('test');
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputExtensionJS: ".notjs",
AbsOutputDir: "/out",
},
})
} | explode_data.jsonl/38565 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 162
} | [
2830,
3393,
5097,
12049,
6590,
3629,
6184,
1155,
353,
8840,
836,
8,
341,
11940,
57239,
25952,
33,
1241,
832,
1155,
11,
51450,
515,
197,
74075,
25,
2415,
14032,
30953,
515,
298,
197,
3115,
4085,
2857,
788,
22074,
571,
12160,
1665,
492,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func Test_EmptyInput(t *testing.T) {
jb := []byte("")
var out string
err := jsoniter.Unmarshal(jb, &out)
if err == nil {
t.Errorf("Expected error")
}
} | explode_data.jsonl/73525 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 69
} | [
2830,
3393,
76060,
1595,
2505,
1155,
353,
8840,
836,
8,
341,
12428,
65,
1669,
3056,
3782,
31764,
2405,
700,
914,
198,
9859,
1669,
2951,
2015,
38097,
3325,
65,
11,
609,
411,
340,
743,
1848,
621,
2092,
341,
197,
3244,
13080,
445,
18896,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 2 |
func TestIsSecure(t *testing.T) {
testCases := []struct {
description string
url string
xForwardedProto string
tls bool
expectIsSecure bool
}{
{
description: "HTTP",
url: "http://host.com",
expectIsSecure: false,
},
{
description: "HTTPS - Forwarded Protocol",
url: "http://host.com",
xForwardedProto: "https",
expectIsSecure: true,
},
{
description: "HTTPS - Forwarded Protocol - Case Insensitive",
url: "http://host.com",
xForwardedProto: "HTTPS",
expectIsSecure: true,
},
{
description: "HTTPS - Protocol",
url: "https://host.com",
expectIsSecure: true,
},
{
description: "HTTPS - Protocol - Case Insensitive",
url: "HTTPS://host.com",
expectIsSecure: true,
},
{
description: "HTTPS - TLS",
url: "http://host.com",
tls: true,
expectIsSecure: true,
},
}
for _, test := range testCases {
request, err := http.NewRequest("GET", test.url, nil)
if err != nil {
t.Fatalf("Unable to create test http request. Err: %v", err)
}
if test.xForwardedProto != "" {
request.Header.Add("X-Forwarded-Proto", test.xForwardedProto)
}
if test.tls {
request.TLS = &tls.ConnectionState{}
}
result := IsSecure(request)
assert.Equal(t, test.expectIsSecure, result, test.description)
}
} | explode_data.jsonl/3184 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 680
} | [
2830,
3393,
3872,
49813,
1155,
353,
8840,
836,
8,
341,
18185,
37302,
1669,
3056,
1235,
341,
197,
42407,
257,
914,
198,
197,
19320,
1797,
914,
198,
197,
10225,
25925,
291,
31549,
914,
198,
197,
3244,
4730,
1797,
1807,
198,
197,
24952,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestStartCommandLineWithHelpCommand(t *testing.T){
os.Args = []string{
"bellt",
"help",
}
err := StartCommandLine(emptyOnlyOneCommand)
if err != nil {
t.Errorf("Error handling error: want %s, got %s", "nil", err.Error())
}
} | explode_data.jsonl/30301 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 92
} | [
2830,
3393,
3479,
71885,
2354,
12689,
4062,
1155,
353,
8840,
836,
1264,
25078,
51015,
284,
3056,
917,
515,
197,
197,
1,
17250,
83,
756,
197,
197,
1,
8653,
756,
197,
630,
9859,
1669,
5145,
71885,
24216,
7308,
3966,
4062,
340,
743,
1848... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestIsHTTPError(t *testing.T) {
cases := []struct {
name string
err error
expected bool
result *HTTPStatusCodeError
}{
{
name: "non-http error",
err: errors.New("some error"),
expected: false,
result: nil,
},
{
name: "http error",
err: HTTPStatusCodeError{StatusCode: http.StatusGone},
expected: true,
result: &HTTPStatusCodeError{StatusCode: http.StatusGone},
},
{
name: "http pointer error",
err: &HTTPStatusCodeError{StatusCode: http.StatusGone},
expected: true,
result: &HTTPStatusCodeError{StatusCode: http.StatusGone},
},
{
name: "nil",
err: nil,
expected: false,
result: nil,
},
}
for _, tc := range cases {
err, actual := IsHTTPError(tc.err)
if tc.expected != actual {
t.Errorf("%v: expected %v, got %v", tc.name, tc.expected, actual)
}
if tc.result != err {
if *tc.result != *err {
t.Errorf("%v: expected %v, got %v", tc.name, tc.result, err)
}
}
}
} | explode_data.jsonl/53556 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 468
} | [
2830,
3393,
3872,
9230,
1454,
1155,
353,
8840,
836,
8,
341,
1444,
2264,
1669,
3056,
1235,
341,
197,
11609,
257,
914,
198,
197,
9859,
414,
1465,
198,
197,
42400,
1807,
198,
197,
9559,
256,
353,
9230,
15872,
1454,
198,
197,
59403,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestNewService(t *testing.T) {
// valid Services
s1, err := NewService("kyc", "KYC", "https://kyc.example.com")
assert.NotNil(t, s1)
assert.NoError(t, err)
s2, err := NewService("kyc", "KYC", "http://kyc.example.com")
assert.NotNil(t, s2)
assert.NoError(t, err)
// invalid Services
s3, err := NewService("", "KYC", "https://kyc.example.com")
assert.Nil(t, s3)
assert.Error(t, err)
s4, err := NewService("kyc", "", "https://kyc.example.com")
assert.Nil(t, s4)
assert.Error(t, err)
s5, err := NewService("kyc", "KYC", "")
assert.Nil(t, s5)
assert.Error(t, err)
s6, err := NewService("kyc", "KYC", "example.com")
assert.Nil(t, s6)
assert.Error(t, err)
} | explode_data.jsonl/51898 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 306
} | [
2830,
3393,
3564,
1860,
1155,
353,
8840,
836,
8,
1476,
197,
322,
2697,
8307,
198,
1903,
16,
11,
1848,
1669,
1532,
1860,
445,
74,
3337,
497,
330,
76585,
34,
497,
330,
2428,
1110,
74,
3337,
7724,
905,
1138,
6948,
93882,
1155,
11,
274,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestEventRelationships(t *testing.T) {
deployment := Deploy(t, "msc2836", b.BlueprintFederationOneToOneRoom)
defer deployment.Destroy(t)
// Create the room and send events A,B,C,D
alice := deployment.Client(t, "hs1", "@alice:hs1")
roomID := alice.CreateRoom(t, map[string]interface{}{
"preset": "public_chat",
})
eventA := alice.SendEventSynced(t, roomID, b.Event{
Type: "m.room.message",
Content: map[string]interface{}{
"msgtype": "m.text",
"body": "Message A",
},
})
eventB := alice.SendEventSynced(t, roomID, b.Event{
Type: "m.room.message",
Content: map[string]interface{}{
"msgtype": "m.text",
"body": "Message B",
"m.relationship": map[string]interface{}{
"rel_type": "m.reference",
"event_id": eventA,
},
},
})
eventC := alice.SendEventSynced(t, roomID, b.Event{
Type: "m.room.message",
Content: map[string]interface{}{
"msgtype": "m.text",
"body": "Message C",
"m.relationship": map[string]interface{}{
"rel_type": "m.reference",
"event_id": eventB,
},
},
})
eventD := alice.SendEventSynced(t, roomID, b.Event{
Type: "m.room.message",
Content: map[string]interface{}{
"msgtype": "m.text",
"body": "Message D",
"m.relationship": map[string]interface{}{
"rel_type": "m.reference",
"event_id": eventB,
},
},
})
t.Logf("Event ID A:%s B:%s C:%s D:%s", eventA, eventB, eventC, eventD)
// Join the room from another server
bob := deployment.Client(t, "hs2", "@bob:hs2")
_ = bob.JoinRoom(t, roomID, []string{"hs1"})
// Now hit /event_relationships with eventD
res := bob.MustDo(t, "POST", []string{"_matrix", "client", "unstable", "event_relationships"}, map[string]interface{}{
"event_id": eventD,
"room_id": roomID, // required so the server knows which servers to ask
"direction": "down", // no newer events, so nothing should be added
"include_parent": true, // this should pull in event B
})
var gots []gjson.Result
must.MatchResponse(t, res, match.HTTPResponse{
JSON: []match.JSON{
match.JSONKeyEqual("limited", false),
match.JSONArrayEach("events", func(r gjson.Result) error {
gots = append(gots, r)
return nil
}),
},
})
if len(gots) != 2 {
t.Fatalf("/event_relationships got %d events, want 2", len(gots))
}
if gots[0].Get("event_id").Str != eventD {
t.Fatalf("/event_relationships expected first element to be event D but was %s", gots[0].Raw)
}
if gots[1].Get("event_id").Str != eventB {
t.Fatalf("/event_relationships expected second element to be event B but was %s", gots[1].Raw)
}
// check the children count of event B to make sure it is 2 (C,D)
// and check the hash is correct
checkUnsigned(t, gots[1], map[string]int64{
"m.reference": 2,
}, []string{eventC, eventD})
// now hit /event_relationships again with B, which should return everything (and fetch the missing events A,C)
res = bob.MustDo(t, "POST", []string{"_matrix", "client", "unstable", "event_relationships"}, map[string]interface{}{
"event_id": eventB,
"room_id": roomID, // required so the server knows which servers to ask
"direction": "down", // this pulls in C,D
"include_parent": true, // this pulls in A
"recent_first": false,
})
gots = []gjson.Result{}
must.MatchResponse(t, res, match.HTTPResponse{
JSON: []match.JSON{
match.JSONKeyEqual("limited", false),
match.JSONArrayEach("events", func(r gjson.Result) error {
gots = append(gots, r)
return nil
}),
},
})
if len(gots) != 4 {
t.Fatalf("/event_relationships returned %d events, want 4. %v", len(gots), gots)
}
if gots[0].Get("event_id").Str != eventB {
t.Fatalf("/event_relationships expected first element to be event B but was %s", gots[0].Raw)
}
if gots[1].Get("event_id").Str != eventA {
t.Fatalf("/event_relationships expected second element to be event A but was %s", gots[1].Raw)
}
if gots[2].Get("event_id").Str != eventC {
t.Fatalf("/event_relationships expected third element to be event C but was %s", gots[2].Raw)
}
if gots[3].Get("event_id").Str != eventD {
t.Fatalf("/event_relationships expected fourth element to be event D but was %s", gots[3].Raw)
}
// event A has event B as a child
checkUnsigned(t, gots[1], map[string]int64{
"m.reference": 1,
}, []string{eventB})
// event B has events C,D as children (same as before)
checkUnsigned(t, gots[0], map[string]int64{
"m.reference": 2,
}, []string{eventC, eventD})
} | explode_data.jsonl/62468 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1815
} | [
2830,
3393,
1556,
50822,
82,
1155,
353,
8840,
836,
8,
341,
197,
82213,
1669,
70595,
1155,
11,
330,
1011,
66,
17,
23,
18,
21,
497,
293,
60159,
1350,
37,
96013,
3966,
28190,
14003,
340,
16867,
23172,
57011,
1155,
692,
197,
322,
4230,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestFindAndAddNewPods_WithVolumeRetrievalError(t *testing.T) {
// create dswp
dswp, fakePodManager := prepareDswpWithVolume(t)
pluginPVOmittingClient(dswp)
// create pod
containers := []v1.Container{
{
VolumeMounts: []v1.VolumeMount{
{
Name: "dswp-test-volume-name",
MountPath: "/mnt",
},
},
},
}
pod := createPodWithVolume("dswp-test-pod", "dswp-test-volume-name", "file-bound", containers)
fakePodManager.AddPod(pod)
podName := util.GetUniquePodName(pod)
dswp.findAndAddNewPods()
if dswp.podPreviouslyProcessed(podName) {
t.Fatalf("The volumes for the specified pod: %s should not have been processed by the populator", podName)
}
if dswp.podHasBeenSeenOnce(podName) {
t.Fatalf("The volumes for the specified pod: %s should not have been processed by the populator", podName)
}
} | explode_data.jsonl/45746 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 333
} | [
2830,
3393,
9885,
3036,
2212,
3564,
23527,
82,
62,
2354,
18902,
12020,
7231,
831,
1454,
1155,
353,
8840,
836,
8,
341,
197,
322,
1855,
294,
2280,
79,
198,
2698,
2280,
79,
11,
12418,
23527,
2043,
1669,
10549,
35,
2280,
79,
2354,
18902,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestBigRatLit(t *testing.T) {
gopClTest(t, `
var x = 1/2r
`, `package main
import (
builtin "github.com/goplus/gop/builtin"
big "math/big"
)
var x = builtin.Gop_bigrat_Init__2(big.NewRat(1, 2))
`)
} | explode_data.jsonl/73617 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 103
} | [
2830,
3393,
15636,
49,
266,
68954,
1155,
353,
8840,
836,
8,
341,
3174,
453,
5066,
2271,
1155,
11,
22074,
947,
856,
284,
220,
16,
14,
17,
81,
198,
7808,
1565,
1722,
1887,
271,
474,
2399,
2233,
25628,
330,
5204,
905,
4846,
55078,
355,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestTLSServerClosesConnection(t *testing.T) {
defer afterTest(t)
closedc := make(chan bool, 1)
ts := httptest.NewTLSServer(HandlerFunc(func(w ResponseWriter, r *Request) {
if strings.Contains(r.URL.Path, "/keep-alive-then-die") {
conn, _, _ := w.(Hijacker).Hijack()
conn.Write([]byte("HTTP/1.1 200 OK\r\nContent-Length: 3\r\n\r\nfoo"))
conn.Close()
closedc <- true
return
}
fmt.Fprintf(w, "hello")
}))
defer ts.Close()
c := ts.Client()
tr := c.Transport.(*Transport)
var nSuccess = 0
var errs []error
const trials = 20
for i := 0; i < trials; i++ {
tr.CloseIdleConnections()
res, err := c.Get(ts.URL + "/keep-alive-then-die")
if err != nil {
t.Fatal(err)
}
<-closedc
slurp, err := ioutil.ReadAll(res.Body)
if err != nil {
t.Fatal(err)
}
if string(slurp) != "foo" {
t.Errorf("Got %q, want foo", slurp)
}
// Now try again and see if we successfully
// pick a new connection.
res, err = c.Get(ts.URL + "/")
if err != nil {
errs = append(errs, err)
continue
}
slurp, err = ioutil.ReadAll(res.Body)
if err != nil {
errs = append(errs, err)
continue
}
nSuccess++
}
if nSuccess > 0 {
t.Logf("successes = %d of %d", nSuccess, trials)
} else {
t.Errorf("All runs failed:")
}
for _, err := range errs {
t.Logf(" err: %v", err)
}
} | explode_data.jsonl/14126 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 613
} | [
2830,
3393,
13470,
1220,
2836,
34,
49341,
4526,
1155,
353,
8840,
836,
8,
341,
16867,
1283,
2271,
1155,
692,
1444,
9259,
66,
1669,
1281,
35190,
1807,
11,
220,
16,
340,
57441,
1669,
54320,
70334,
7121,
13470,
1220,
2836,
7,
3050,
9626,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestUpdateTLS(t *testing.T) {
setup()
defer tearDown()
addrStr := "127.0.0.1:8081"
name := "listener2"
listenerConfig := baseListenerConfig(addrStr, name)
if err := GetListenerAdapterInstance().AddOrUpdateListener(testServerName, listenerConfig); err != nil {
t.Fatalf("add a new listener failed %v", err)
}
time.Sleep(time.Second) // wait listener start
tlsCfg := v2.TLSConfig{
Status: false,
}
// tls handleshake success
dialer := &net.Dialer{
Timeout: time.Second,
}
if conn, err := tls.DialWithDialer(dialer, "tcp", addrStr, &tls.Config{
InsecureSkipVerify: true,
}); err != nil {
t.Fatal("dial tls failed", err)
} else {
conn.Close()
}
if err := GetListenerAdapterInstance().UpdateListenerTLS(testServerName, name, false, []v2.TLSConfig{tlsCfg}); err != nil {
t.Fatalf("update tls listener failed %v", err)
}
handler := listenerAdapterInstance.defaultConnHandler.(*connHandler)
newLn := handler.FindListenerByName(name)
cfg := newLn.Config()
// verify tls changed
if !(reflect.DeepEqual(cfg.FilterChains[0].TLSContexts[0], tlsCfg) &&
cfg.Inspector == false) {
t.Fatal("update tls config not expected")
}
// tls handshake should be failed, because tls is changed to false
if conn, err := tls.DialWithDialer(dialer, "tcp", addrStr, &tls.Config{
InsecureSkipVerify: true,
}); err == nil {
conn.Close()
t.Fatal("listener should not be support tls any more")
}
} | explode_data.jsonl/9407 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 529
} | [
2830,
3393,
4289,
45439,
1155,
353,
8840,
836,
8,
341,
84571,
741,
16867,
32825,
2822,
53183,
2580,
1669,
330,
16,
17,
22,
13,
15,
13,
15,
13,
16,
25,
23,
15,
23,
16,
698,
11609,
1669,
330,
35039,
17,
698,
14440,
798,
2648,
1669,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestSetCellStyleAlignment(t *testing.T) {
f, err := prepareTestBook1()
if !assert.NoError(t, err) {
t.FailNow()
}
var style int
style, err = f.NewStyle(`{"alignment":{"horizontal":"center","ident":1,"justify_last_line":true,"reading_order":0,"relative_indent":1,"shrink_to_fit":true,"text_rotation":45,"vertical":"top","wrap_text":true}}`)
if !assert.NoError(t, err) {
t.FailNow()
}
assert.NoError(t, f.SetCellStyle("Sheet1", "A22", "A22", style))
// Test set cell style with given illegal rows number.
assert.EqualError(t, f.SetCellStyle("Sheet1", "A", "A22", style), `cannot convert cell "A" to coordinates: invalid cell name "A"`)
assert.EqualError(t, f.SetCellStyle("Sheet1", "A22", "A", style), `cannot convert cell "A" to coordinates: invalid cell name "A"`)
// Test get cell style with given illegal rows number.
index, err := f.GetCellStyle("Sheet1", "A")
assert.Equal(t, 0, index)
assert.EqualError(t, err, `cannot convert cell "A" to coordinates: invalid cell name "A"`)
assert.NoError(t, f.SaveAs(filepath.Join("test", "TestSetCellStyleAlignment.xlsx")))
} | explode_data.jsonl/36968 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 407
} | [
2830,
3393,
1649,
15171,
7033,
1155,
353,
8840,
836,
8,
341,
1166,
11,
1848,
1669,
10549,
2271,
7134,
16,
741,
743,
753,
2207,
35699,
1155,
11,
1848,
8,
341,
197,
3244,
57243,
7039,
741,
197,
630,
2405,
1707,
526,
198,
42551,
11,
18... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestGetPVCNameFromVolumeMountName(t *testing.T) {
type args struct {
volumeMountName string
dc *appsv1.DeploymentConfig
}
tests := []struct {
name string
args args
want string
}{
{
name: "Test case : Deployment config with given PVC",
args: args{
volumeMountName: "test-pvc",
dc: &appsv1.DeploymentConfig{
Spec: appsv1.DeploymentConfigSpec{
Selector: map[string]string{
"deploymentconfig": "test",
},
Template: &corev1.PodTemplateSpec{
Spec: corev1.PodSpec{
Containers: []corev1.Container{
{
Name: "test",
VolumeMounts: []corev1.VolumeMount{
{
MountPath: "/tmp",
Name: "test-pvc",
},
},
},
},
Volumes: []corev1.Volume{
{
Name: "test-pvc",
VolumeSource: corev1.VolumeSource{
PersistentVolumeClaim: &corev1.PersistentVolumeClaimVolumeSource{
ClaimName: "test-pvc",
},
},
},
},
},
},
},
},
},
want: "test-pvc",
},
{
name: "Test case : Deployment config without given PVC",
args: args{
volumeMountName: "non-existent-pvc",
dc: &appsv1.DeploymentConfig{
Spec: appsv1.DeploymentConfigSpec{
Selector: map[string]string{
"deploymentconfig": "test",
},
Template: &corev1.PodTemplateSpec{
Spec: corev1.PodSpec{
Containers: []corev1.Container{
{
Name: "test",
VolumeMounts: []corev1.VolumeMount{
{
MountPath: "/tmp",
Name: "test-pvc",
},
},
},
},
Volumes: []corev1.Volume{
{
Name: "test-pvc",
VolumeSource: corev1.VolumeSource{
PersistentVolumeClaim: &corev1.PersistentVolumeClaimVolumeSource{
ClaimName: "test-pvc",
},
},
},
},
},
},
},
},
},
want: "",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
fakeClient, _ := FakeNew()
returnValue := fakeClient.GetPVCNameFromVolumeMountName(tt.args.volumeMountName, tt.args.dc)
// Check for validating return value
if returnValue != tt.want {
t.Errorf("error in return value got: %v, expected %v", returnValue, tt.want)
}
})
}
} | explode_data.jsonl/65139 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1319
} | [
2830,
3393,
1949,
47,
11287,
675,
3830,
18902,
16284,
675,
1155,
353,
8840,
836,
8,
341,
13158,
2827,
2036,
341,
197,
5195,
4661,
16284,
675,
914,
198,
197,
87249,
1060,
353,
676,
3492,
16,
34848,
39130,
2648,
198,
197,
532,
78216,
16... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestTruncateContainerId(t *testing.T) {
testCases := []struct {
containerId string
expected string
}{
{"docker://a1b2c3d4e5f6g7h8i9j0k1l2m3n", "a1b2c3d4e5f6"},
{"docker://a1b2c3d4e5f6", "a1b2c3d4e5f6"},
{"docker://a1b2c3", "a1b2c3"},
{"containerd://a1b2c3d4e5f6g7h8i9j0k1l2m3n", "a1b2c3d4e5f6"},
{"a1b2c3", ""},
{"", ""},
}
for _, test := range testCases {
res := TruncateContainerId(test.containerId)
if res != test.expected {
t.Errorf("containerId=%s, get=%s, but expected=%s", test.containerId, res, test.expected)
}
}
} | explode_data.jsonl/45096 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 299
} | [
2830,
3393,
1282,
26900,
4502,
764,
1155,
353,
8840,
836,
8,
341,
18185,
37302,
1669,
3056,
1235,
341,
197,
53290,
764,
914,
198,
197,
42400,
262,
914,
198,
197,
59403,
197,
197,
4913,
28648,
1110,
64,
16,
65,
17,
66,
18,
67,
19,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
// TestShardClose fills shards of various sizes, closes them, and checks
// that: the shard size matches the fill count, the expiry queue is only
// populated when entries have a non-zero lifetime, the finalizer fires
// exactly once per element, and both shard and queue end up empty.
func TestShardClose(t *testing.T) {
	elements := []struct {
		s        *shard
		n        int
		lifetime time.Duration
	}{
		{
			s:        &shard{elements: make(map[string]element), q: &queue{}},
			n:        1024,
			lifetime: time.Minute,
		},
		{
			s:        &shard{elements: make(map[string]element), q: &queue{}},
			n:        2048,
			lifetime: time.Minute,
		},
		{
			// Zero lifetime: entries never enter the expiry queue.
			s:        &shard{elements: make(map[string]element), q: &queue{}},
			n:        4096,
			lifetime: 0,
		},
	}
	for _, e := range elements {
		for i := 0; i < e.n; i++ {
			e.s.set(fmt.Sprintf("%d", i), i, e.lifetime)
		}
		// Capture pre-close sizes so they can be reported after close.
		size, qsize := e.s.size(), e.s.q.size()
		assert.Equal(t, e.s.size(), e.n)
		if e.lifetime != 0 {
			assert.Equal(t, e.s.q.size(), e.n)
		}
		// Count finalizer invocations during close; expect one per element.
		fn := 0
		e.s.finalizer = func(k string, v interface{}) {
			fn++
		}
		e.s.close()
		t.Logf("size/original-size (%d/%d) queue-size/original-queue-size (%d/%d) finalize-count (%d)",
			size, e.s.size(), qsize, e.s.q.size(), fn)
		assert.Equal(t, fn, e.n)
		assert.Equal(t, e.s.size(), 0)
		assert.Equal(t, e.s.q.size(), 0)
	}
} | explode_data.jsonl/5399 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 577
} | [
2830,
3393,
2016,
567,
7925,
1155,
353,
8840,
836,
8,
341,
197,
21423,
1669,
3056,
1235,
341,
197,
1903,
286,
353,
927,
567,
198,
197,
9038,
286,
526,
198,
197,
8810,
28515,
882,
33795,
198,
197,
59403,
197,
197,
515,
298,
1903,
25,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestGameSetupNotOvertakePre verifies the initial state produced by
// Game.setup on a 4x4 grid: both score counters start at zero, empty
// cells hold the zero value, non-empty cells hold a non-zero value,
// and exactly two cells are seeded.
func TestGameSetupNotOvertakePre(t *testing.T) {
	game := Game{gridSize: 4}
	gInfo := GameInfo{}
	game.setup(gInfo)
	if game.score != 0 {
		t.Errorf("score %v expected %v", game.score, 0)
	}
	if game.highScore != 0 {
		// Bug fix: report the highScore field itself — the original passed
		// game.score here, logging a misleading value on failure.
		t.Errorf("high score %v expected %v", game.highScore, 0)
	}
	notEmptyCount := 0
	for x := 0; x < len(game.grid.cells); x++ {
		for y := 0; y < len(game.grid.cells[x]); y++ {
			if game.grid.cells[x][y].isEmpty {
				if game.grid.cells[x][y].value != 0 {
					t.Errorf("when cells is empty, %v should be equal to zero", game.grid.cells[x][y].value)
				}
			} else {
				if game.grid.cells[x][y].value == 0 {
					t.Errorf("when cells is empty, %v mustn't be equals to zero", game.grid.cells[x][y].value)
				}
				notEmptyCount++
			}
		}
	}
	// setup is expected to seed exactly two tiles.
	if notEmptyCount != 2 {
		t.Errorf("Not empty should be %v", 2)
	}
} | explode_data.jsonl/59002 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 367
} | [
2830,
3393,
4868,
21821,
2623,
46,
1621,
726,
4703,
1155,
353,
8840,
836,
8,
341,
30677,
1669,
4050,
90,
4203,
1695,
25,
220,
19,
532,
3174,
1731,
1669,
4050,
1731,
16094,
30677,
25338,
3268,
1731,
692,
743,
1809,
21540,
961,
220,
15,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 9 |
// TestFloat64SetRangeByScoreWithOpt checks RangeByScoreWithOpt boundary
// handling (ExcludeMin / ExcludeMax) on a set scoring 1.0, 1.1 and 2.0,
// including inverted ranges and the degenerate min == max range.
func TestFloat64SetRangeByScoreWithOpt(t *testing.T) {
	z := NewFloat64()
	z.Add(1.0, "1")
	z.Add(1.1, "2")
	z.Add(2.0, "3")
	cases := []struct {
		min, max float64
		opt      RangeOpt
		scores   []float64 // expected scores, in order
	}{
		{1.0, 2.0, RangeOpt{ExcludeMin: true}, []float64{1.1, 2.0}},
		{1.0, 2.0, RangeOpt{ExcludeMin: true, ExcludeMax: true}, []float64{1.1}},
		{1.0, 2.0, RangeOpt{ExcludeMax: true}, []float64{1.0, 1.1}},
		// Inverted ranges always yield nothing, whatever the options.
		{2.0, 1.0, RangeOpt{}, nil},
		{2.0, 1.0, RangeOpt{ExcludeMin: true}, nil},
		{2.0, 1.0, RangeOpt{ExcludeMax: true}, nil},
		// min == max is empty as soon as either bound is excluded.
		{1.0, 1.0, RangeOpt{ExcludeMax: true}, nil},
		{1.0, 1.0, RangeOpt{ExcludeMin: true}, nil},
		{1.0, 1.0, RangeOpt{}, []float64{1.0}},
	}
	for _, c := range cases {
		ns := z.RangeByScoreWithOpt(c.min, c.max, c.opt)
		assert.Equal(t, len(c.scores), len(ns))
		for i, want := range c.scores {
			assert.Equal(t, want, ns[i].Score)
		}
	}
} | explode_data.jsonl/25000 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 554
} | [
2830,
3393,
5442,
21,
19,
1649,
6046,
1359,
10570,
2354,
21367,
1155,
353,
8840,
836,
8,
341,
20832,
1669,
1532,
5442,
21,
19,
741,
20832,
1904,
7,
16,
13,
15,
11,
330,
16,
1138,
20832,
1904,
7,
16,
13,
16,
11,
330,
17,
1138,
20... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestMsgEditValidator table-tests MsgEditValidator.ValidateBasic: the
// message passes with a non-empty description and valid validator
// address, and fails on an empty description, empty address, or a nil
// (zero-value) min self-delegation.
func TestMsgEditValidator(t *testing.T) {
	tests := []struct {
		name, moniker, identity, website, securityContact, details string
		validatorAddr                                              sdk.ValAddress
		expectPass                                                 bool
		minSelfDelegation                                          sdk.Int
	}{
		{"basic good", "a", "b", "c", "d", "e", valAddr1, true, sdk.OneInt()},
		{"partial description", "", "", "c", "", "", valAddr1, true, sdk.OneInt()},
		{"empty description", "", "", "", "", "", valAddr1, false, sdk.OneInt()},
		{"empty address", "a", "b", "c", "d", "e", emptyAddr, false, sdk.OneInt()},
		{"nil int", "a", "b", "c", "d", "e", emptyAddr, false, sdk.Int{}},
	}
	for _, tc := range tests {
		description := NewDescription(tc.moniker, tc.identity, tc.website, tc.securityContact, tc.details)
		newRate := sdk.ZeroDec()
		msg := NewMsgEditValidator(tc.validatorAddr, description, &newRate, &tc.minSelfDelegation)
		if tc.expectPass {
			require.Nil(t, msg.ValidateBasic(), "test: %v", tc.name)
		} else {
			require.NotNil(t, msg.ValidateBasic(), "test: %v", tc.name)
		}
	}
} | explode_data.jsonl/22796 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 544
} | [
2830,
3393,
6611,
4036,
14256,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
11609,
11,
1615,
24803,
11,
9569,
11,
3910,
11,
4763,
8732,
11,
3565,
914,
198,
197,
197,
16112,
13986,
13463,
45402,
77819,
4286,
198,
19... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
// TestStartAndEndAlphanumeric table-tests the bucket-name validator:
// empty names and names beginning and ending with a letter or digit
// are accepted; a leading or trailing period, hyphen or underscore
// yields an error.
func TestStartAndEndAlphanumeric(t *testing.T) {
	cases := []struct {
		errorcase.Error
		Name       string
		BucketName string
	}{
		{
			Name:       "Empty name is OK",
			BucketName: "",
		},
		{
			Name:       "Starts and ends with letter is OK",
			BucketName: "hello",
		},
		{
			Name:       "Starts and ends with number is OK",
			BucketName: "0hello1",
		},
		{
			Name:       "Starts with period returns error",
			BucketName: ".foo",
			Error:      errorcase.NewErrorAny(),
		},
		{
			Name:       "Ends with period returns error",
			BucketName: "foo.",
			Error:      errorcase.NewErrorAny(),
		},
		{
			Name:       "Starts with hyphen returns error",
			BucketName: "-bar",
			Error:      errorcase.NewErrorAny(),
		},
		{
			Name:       "Ends with hyphen returns error",
			BucketName: "bar-",
			Error:      errorcase.NewErrorAny(),
		},
		{
			Name:       "Starts with underscore returns error",
			BucketName: "_baz",
			Error:      errorcase.NewErrorAny(),
		},
		{
			Name:       "Ends with underscore returns error",
			BucketName: "baz_",
			Error:      errorcase.NewErrorAny(),
		},
	}
	for _, td := range cases {
		err := StartAndEndAlphanumeric(td.BucketName)
		_ = errorcase.Eval(t, err, td.Error) // No more test output to evaluate
	}
} | explode_data.jsonl/71157 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 581
} | [
2830,
3393,
3479,
3036,
3727,
2101,
65788,
1155,
353,
8840,
836,
8,
341,
1444,
2264,
1669,
3056,
1235,
341,
197,
18290,
5638,
6141,
198,
197,
21297,
981,
914,
198,
197,
12791,
11152,
675,
914,
198,
197,
59403,
197,
197,
515,
298,
2129... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
// TestServer_ShowDatabases_NoAuth runs the "show_database_no_auth"
// query fixture against a freshly opened server, executing each query
// as a subtest and failing when its result does not match the fixture.
func TestServer_ShowDatabases_NoAuth(t *testing.T) {
	t.Parallel()
	s := OpenServer(NewConfig())
	defer s.Close()
	test := tests.load(t, "show_database_no_auth")
	for _, query := range test.queries {
		t.Run(query.name, func(t *testing.T) {
			// Fixtures may mark individual queries to be skipped.
			if query.skip {
				t.Skipf("SKIP:: %s", query.name)
			}
			if err := query.Execute(s); err != nil {
				t.Error(fmt.Sprintf("command: %s - err: %s", query.command, query.Error(err)))
			} else if !query.success() {
				t.Error(query.failureMessage())
			}
		})
	}
} | explode_data.jsonl/61244 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 224
} | [
2830,
3393,
5475,
79665,
35,
23822,
36989,
5087,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
1903,
1669,
5264,
5475,
35063,
2648,
2398,
16867,
274,
10421,
2822,
18185,
1669,
7032,
5104,
1155,
11,
330,
3445,
27341,
6536,
14014,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
// TestPipelineRunDescribeV1beta1 seeds a v1beta1 PipelineRun with two
// task runs (tr-1 failed, tr-2 succeeded), runs the CLI's
// "pipelinerun describe" against it and compares the rendered output
// with the checked-in golden file for this test.
func TestPipelineRunDescribeV1beta1(t *testing.T) {
	clock := clockwork.NewFakeClock()
	pipelinerunname := "pipeline-run"
	taskRuns := []*v1beta1.TaskRun{
		{
			ObjectMeta: metav1.ObjectMeta{
				Namespace: "ns",
				Name:      "tr-1",
				Labels:    map[string]string{"tekton.dev/task": "task-1"},
			},
			Spec: v1beta1.TaskRunSpec{
				TaskRef: &v1beta1.TaskRef{
					Name: "task-1",
				},
			},
			Status: v1beta1.TaskRunStatus{
				Status: duckv1beta1.Status{
					Conditions: duckv1beta1.Conditions{
						{
							Status: corev1.ConditionFalse,
							Reason: resources.ReasonFailed,
						},
					},
				},
				TaskRunStatusFields: v1beta1.TaskRunStatusFields{
					StartTime:      &metav1.Time{Time: clock.Now()},
					CompletionTime: &metav1.Time{Time: clock.Now().Add(5 * time.Minute)},
				},
			},
		},
		{
			ObjectMeta: metav1.ObjectMeta{
				Namespace: "ns",
				Name:      "tr-2",
				Labels:    map[string]string{"tekton.dev/task": "task-1"},
			},
			Spec: v1beta1.TaskRunSpec{
				TaskRef: &v1beta1.TaskRef{
					Name: "task-1",
				},
			},
			Status: v1beta1.TaskRunStatus{
				Status: duckv1beta1.Status{
					Conditions: duckv1beta1.Conditions{
						{
							Status: corev1.ConditionTrue,
							Reason: resources.ReasonSucceeded,
						},
					},
				},
				TaskRunStatusFields: v1beta1.TaskRunStatusFields{
					StartTime:      &metav1.Time{Time: clock.Now().Add(10 * time.Minute)},
					CompletionTime: &metav1.Time{Time: clock.Now().Add(17 * time.Minute)},
				},
			},
		},
	}
	prun := []*v1beta1.PipelineRun{
		{
			ObjectMeta: metav1.ObjectMeta{
				Name:      pipelinerunname,
				Namespace: "ns",
			},
			Spec: v1beta1.PipelineRunSpec{
				PipelineRef: &v1beta1.PipelineRef{
					Name: "pipeline",
				},
				Resources: []v1beta1.PipelineResourceBinding{
					{
						Name: "res-1",
						ResourceRef: &v1beta1.PipelineResourceRef{
							Name: "test-res",
						},
					},
					{
						Name: "res-2",
						ResourceRef: &v1beta1.PipelineResourceRef{
							Name: "test-res2",
						},
					},
				},
				Params: []v1beta1.Param{
					{
						Name: "p-1",
						Value: v1beta1.ArrayOrString{
							Type:      v1beta1.ParamTypeString,
							StringVal: "somethingdifferent",
						},
					},
					{
						Name: "p-2",
						Value: v1beta1.ArrayOrString{
							Type:     v1beta1.ParamTypeArray,
							ArrayVal: []string{"booms", "booms", "booms"},
						},
					},
				},
			},
			Status: v1beta1.PipelineRunStatus{
				Status: duckv1beta1.Status{
					Conditions: duckv1beta1.Conditions{
						{
							Status:  corev1.ConditionTrue,
							Reason:  resources.ReasonSucceeded,
							Message: "Completed",
						},
					},
				},
				PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{
					StartTime:      &metav1.Time{Time: clock.Now()},
					CompletionTime: &metav1.Time{Time: clock.Now().Add(20 * time.Minute)},
					TaskRuns: map[string]*v1beta1.PipelineRunTaskRunStatus{
						"tr-1": {
							PipelineTaskName: "t-1",
							Status:           &taskRuns[0].Status,
						},
						"tr-2": {
							PipelineTaskName: "t-2",
							Status:           &taskRuns[1].Status,
						},
					},
				},
			},
		},
	}
	namespaces := []*corev1.Namespace{
		{
			ObjectMeta: metav1.ObjectMeta{
				Name: "ns",
			},
		},
	}
	version := "v1beta1"
	// Back the dynamic client with unstructured copies of the fixtures.
	tdc := testDynamic.Options{}
	dynamic, err := tdc.Client(
		cb.UnstructuredV1beta1PR(prun[0], version),
		cb.UnstructuredV1beta1TR(taskRuns[0], version),
		cb.UnstructuredV1beta1TR(taskRuns[1], version),
	)
	if err != nil {
		t.Errorf("unable to create dynamic client: %v", err)
	}
	cs, _ := test.SeedV1beta1TestData(t, pipelinev1beta1test.Data{Namespaces: namespaces, PipelineRuns: prun, TaskRuns: taskRuns})
	cs.Pipeline.Resources = cb.APIResourceList(version, []string{"pipelinerun", "taskrun"})
	p := &test.Params{Tekton: cs.Pipeline, Kube: cs.Kube, Dynamic: dynamic}
	pipelinerun := Command(p)
	got, err := test.ExecuteCommand(pipelinerun, "desc", "-n", "ns", pipelinerunname)
	if err != nil {
		t.Errorf("Unexpected error: %v", err)
	}
	// Golden-file comparison keyed by the test's own name.
	golden.Assert(t, got, fmt.Sprintf("%s.golden", t.Name()))
} | explode_data.jsonl/7916 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 2092
} | [
2830,
3393,
34656,
6727,
74785,
53,
16,
19127,
16,
1155,
353,
8840,
836,
8,
341,
84165,
1669,
8866,
1778,
7121,
52317,
26104,
741,
3223,
81079,
10453,
359,
606,
1669,
330,
51258,
22973,
698,
49115,
73920,
1669,
29838,
85,
16,
19127,
16,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
// TestCon starts an HTTP server on an ephemeral loopback port with the
// /hello handler registered and verifies the response body.
func TestCon(t *testing.T) {
	listener, err := net.Listen("tcp", "127.0.0.1:0")
	handlerError(t, err)
	defer listener.Close()

	http.HandleFunc("/hello", helloHandler)
	go http.Serve(listener, nil)

	resp, err := http.Get("http://" + listener.Addr().String() + "/hello")
	handlerError(t, err)
	defer resp.Body.Close()

	payload, err := ioutil.ReadAll(resp.Body)
	handlerError(t, err)
	if string(payload) != "hello world" {
		t.Fatal("expected hello world, but got", string(payload))
	}
} | explode_data.jsonl/66737 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 187
} | [
2830,
3393,
1109,
1155,
353,
8840,
836,
8,
341,
197,
2261,
11,
1848,
1669,
4179,
68334,
445,
27161,
497,
330,
16,
17,
22,
13,
15,
13,
15,
13,
16,
25,
15,
1138,
53326,
1454,
1155,
11,
1848,
340,
16867,
29390,
10421,
741,
28080,
636... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
// TestRepoAllowlistChecker_IsAllowlisted table-tests allowlist matching:
// exact hostname/owner/repo matches, "*" wildcards at any position,
// comma-separated lists, and case-insensitive comparison.
func TestRepoAllowlistChecker_IsAllowlisted(t *testing.T) {
	cases := []struct {
		Description  string
		Allowlist    string
		RepoFullName string
		Hostname     string
		Exp          bool
	}{
		{
			"exact match",
			"github.com/owner/repo",
			"owner/repo",
			"github.com",
			true,
		},
		{
			"exact match shouldn't match anything else",
			"github.com/owner/repo",
			"owner/rep",
			"github.com",
			false,
		},
		{
			"* should match anything",
			"*",
			"owner/repo",
			"github.com",
			true,
		},
		{
			"github.com* should match anything github",
			"github.com*",
			"owner/repo",
			"github.com",
			true,
		},
		{
			"github.com* should not match gitlab",
			"github.com*",
			"owner/repo",
			"gitlab.com",
			false,
		},
		{
			"github.com/o* should match",
			"github.com/o*",
			"owner/repo",
			"github.com",
			true,
		},
		{
			"github.com/owner/rep* should not match",
			"github.com/owner/rep*",
			"owner/re",
			"github.com",
			false,
		},
		{
			"github.com/owner/rep* should match",
			"github.com/owner/rep*",
			"owner/rep",
			"github.com",
			true,
		},
		{
			"github.com/o* should not match",
			"github.com/o*",
			"somethingelse/repo",
			"github.com",
			false,
		},
		{
			"github.com/owner/repo* should match exactly",
			"github.com/owner/repo*",
			"owner/repo",
			"github.com",
			true,
		},
		{
			"github.com/owner/* should match anything in org",
			"github.com/owner/*",
			"owner/repo",
			"github.com",
			true,
		},
		{
			"github.com/owner/* should not match anything not in org",
			"github.com/owner/*",
			"otherorg/repo",
			"github.com",
			false,
		},
		{
			"if there's any * it should match",
			"github.com/owner/repo,*",
			"otherorg/repo",
			"github.com",
			true,
		},
		{
			"any exact match should match",
			"github.com/owner/repo,github.com/otherorg/repo",
			"otherorg/repo",
			"github.com",
			true,
		},
		{
			"longer shouldn't match on exact",
			"github.com/owner/repo",
			"owner/repo-longer",
			"github.com",
			false,
		},
		{
			"should be case insensitive",
			"github.com/owner/repo",
			"OwNeR/rEpO",
			"github.com",
			true,
		},
		{
			"should be case insensitive for wildcards",
			"github.com/owner/*",
			"OwNeR/rEpO",
			"github.com",
			true,
		},
		{
			"should match if wildcard is not last character",
			"github.com/owner/*-repo",
			"owner/prefix-repo",
			"github.com",
			true,
		},
		{
			"should match if wildcard is first character within owner name",
			"github.com/*-owner/repo",
			"prefix-owner/repo",
			"github.com",
			true,
		},
		{
			"should match if wildcard is at beginning",
			"*-owner/repo",
			"prefix-owner/repo",
			"github.com",
			true,
		},
		{
			"should match with duplicate",
			"*runatlantis",
			"runatlantis/runatlantis",
			"github.com",
			true,
		},
	}
	for _, c := range cases {
		t.Run(c.Description, func(t *testing.T) {
			w, err := events.NewRepoAllowlistChecker(c.Allowlist)
			Ok(t, err)
			Equals(t, c.Exp, w.IsAllowlisted(c.RepoFullName, c.Hostname))
		})
	}
} | explode_data.jsonl/71967 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1471
} | [
2830,
3393,
25243,
18605,
1607,
35188,
31879,
18605,
31240,
1155,
353,
8840,
836,
8,
341,
1444,
2264,
1669,
3056,
1235,
341,
197,
47414,
220,
914,
198,
197,
197,
18605,
1607,
262,
914,
198,
197,
197,
25243,
36217,
914,
198,
197,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestCase3 parses a dual-stream SDP (H264 video + AAC audio) and
// verifies the derived logic context: stream presence, clock rates,
// payload-type mappings, a=control track ids and the extracted
// SPS/PPS/ASC parameter sets (no VPS, since this is H264 not H265).
func TestCase3(t *testing.T) {
	golden := `v=0
o=- 2252310609 2252310609 IN IP4 0.0.0.0
s=Media Server
c=IN IP4 0.0.0.0
t=0 0
a=control:*
a=packetization-supported:DH
a=rtppayload-supported:DH
a=range:npt=now-
m=video 0 RTP/AVP 96
a=control:trackID=0
a=framerate:25.000000
a=rtpmap:96 H264/90000
a=fmtp:96 packetization-mode=1;profile-level-id=4D002A;sprop-parameter-sets=Z00AKp2oHgCJ+WbgICAoAAAfQAAGGoQgAA==,aO48gAA=
a=recvonly
m=audio 0 RTP/AVP 97
a=control:trackID=1
a=rtpmap:97 MPEG4-GENERIC/48000
a=fmtp:97 streamtype=5;profile-level-id=1;mode=AAC-hbr;sizelength=13;indexlength=3;indexdeltalength=3;config=1188
a=recvonly`
	// SDP lines use CRLF on the wire; normalize the fixture accordingly.
	golden = strings.ReplaceAll(golden, "\n", "\r\n")
	ctx, err := ParseSdp2LogicContext([]byte(golden))
	assert.Equal(t, nil, err)
	assert.Equal(t, true, ctx.hasAudio)
	assert.Equal(t, true, ctx.hasVideo)
	assert.Equal(t, 48000, ctx.AudioClockRate)
	assert.Equal(t, 90000, ctx.VideoClockRate)
	assert.Equal(t, true, ctx.IsAudioPayloadTypeOrigin(97))
	assert.Equal(t, true, ctx.IsVideoPayloadTypeOrigin(96))
	assert.Equal(t, base.AvPacketPtAac, ctx.GetAudioPayloadTypeBase())
	assert.Equal(t, base.AvPacketPtAvc, ctx.GetVideoPayloadTypeBase())
	assert.Equal(t, "trackID=1", ctx.audioAControl)
	assert.Equal(t, "trackID=0", ctx.videoAControl)
	assert.IsNotNil(t, ctx.Asc)
	assert.Equal(t, nil, ctx.Vps)
	assert.IsNotNil(t, ctx.Sps)
	assert.IsNotNil(t, ctx.Pps)
	nazalog.Debugf("%+v", ctx)
} | explode_data.jsonl/55599 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 677
} | [
2830,
30573,
18,
1155,
353,
8840,
836,
8,
341,
3174,
813,
268,
1669,
1565,
85,
28,
15,
198,
78,
10829,
220,
17,
17,
20,
17,
18,
16,
15,
21,
15,
24,
220,
17,
17,
20,
17,
18,
16,
15,
21,
15,
24,
1964,
6790,
19,
220,
15,
13,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestJetStream_ClusterMultipleSubscribe runs queue-subscribe and
// pull-subscribe sub-tests across cluster-size / replica combinations,
// skipping replica counts that exceed a single-node cluster.
func TestJetStream_ClusterMultipleSubscribe(t *testing.T) {
	nodes := []int{1, 3}
	replicas := []int{1}
	for _, n := range nodes {
		for _, r := range replicas {
			// A single node cannot host more than one replica.
			if r > 1 && n == 1 {
				continue
			}
			t.Run(fmt.Sprintf("qsub n=%d r=%d", n, r), func(t *testing.T) {
				name := fmt.Sprintf("MSUB%d%d", n, r)
				stream := &nats.StreamConfig{
					Name:     name,
					Replicas: r,
				}
				withJSClusterAndStream(t, name, n, stream, testJetStream_ClusterMultipleQueueSubscribe)
			})
			t.Run(fmt.Sprintf("psub n=%d r=%d", n, r), func(t *testing.T) {
				name := fmt.Sprintf("PSUBN%d%d", n, r)
				stream := &nats.StreamConfig{
					Name:     name,
					Replicas: n,
				}
				withJSClusterAndStream(t, name, n, stream, testJetStream_ClusterMultiplePullSubscribe)
			})
		}
	}
} | explode_data.jsonl/29183 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 377
} | [
2830,
3393,
35641,
3027,
85110,
4993,
32089,
28573,
1155,
353,
8840,
836,
8,
341,
79756,
1669,
3056,
396,
90,
16,
11,
220,
18,
532,
73731,
52210,
1669,
3056,
396,
90,
16,
630,
2023,
8358,
308,
1669,
2088,
7798,
341,
197,
2023,
8358,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestEvent_TaskAddedVerboseEqual round-trips a randomly populated
// Event_TaskAdded through proto Marshal/Unmarshal and asserts the
// decoded message is VerboseEqual to the original.
func TestEvent_TaskAddedVerboseEqual(t *testing.T) {
	rnd := math_rand.New(math_rand.NewSource(time.Now().UnixNano()))
	original := NewPopulatedEvent_TaskAdded(rnd, false)
	encoded, err := github_com_gogo_protobuf_proto.Marshal(original)
	if err != nil {
		panic(err)
	}
	decoded := &Event_TaskAdded{}
	if err := github_com_gogo_protobuf_proto.Unmarshal(encoded, decoded); err != nil {
		panic(err)
	}
	if err := original.VerboseEqual(decoded); err != nil {
		t.Fatalf("%#v !VerboseEqual %#v, since %v", decoded, original, err)
	}
} | explode_data.jsonl/42037 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 213
} | [
2830,
3393,
1556,
77103,
19337,
63404,
2993,
1155,
353,
8840,
836,
8,
341,
3223,
46288,
1669,
6888,
33864,
7121,
37270,
33864,
7121,
3608,
9730,
13244,
1005,
55832,
83819,
12145,
3223,
1669,
1532,
11598,
7757,
1556,
77103,
19337,
40148,
81,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
// Test_Controller_Calls_Handler verifies that the deployment controller
// invokes the handler (signalled through the synced channel) on
// RadixDeployment create, on an actual update, and on deletion of each
// owned service — and that Status.Reconciled advances on every sync.
func Test_Controller_Calls_Handler(t *testing.T) {
	anyAppName := "test-app"
	anyEnvironment := "qa"
	// Setup
	tu, client, kubeUtil, radixClient, prometheusclient := setupTest()
	client.CoreV1().Namespaces().Create(
		context.TODO(),
		&corev1.Namespace{
			ObjectMeta: metav1.ObjectMeta{
				Name: utils.GetEnvironmentNamespace(anyAppName, anyEnvironment),
				Labels: map[string]string{
					kube.RadixAppLabel: anyAppName,
					kube.RadixEnvLabel: anyEnvironment,
				},
			},
		},
		metav1.CreateOptions{})
	stop := make(chan struct{})
	synced := make(chan bool)
	defer close(stop)
	defer close(synced)
	radixInformerFactory := informers.NewSharedInformerFactory(radixClient, 0)
	kubeInformerFactory := kubeinformers.NewSharedInformerFactory(client, 0)
	// The callback publishes each sync result onto the synced channel,
	// which the test blocks on to observe handler invocations.
	deploymentHandler := NewHandler(
		client,
		kubeUtil,
		radixClient,
		prometheusclient,
		WithHasSyncedCallback(func(syncedOk bool) { synced <- syncedOk }),
	)
	go startDeploymentController(client, kubeUtil, radixClient, radixInformerFactory, kubeInformerFactory, deploymentHandler, stop)
	// Test
	// Create deployment should sync
	rd, _ := tu.ApplyDeployment(
		utils.ARadixDeployment().
			WithAppName(anyAppName).
			WithEnvironment(anyEnvironment))
	op, ok := <-synced
	assert.True(t, ok)
	assert.True(t, op)
	syncedRd, _ := radixClient.RadixV1().RadixDeployments(rd.ObjectMeta.Namespace).Get(context.TODO(), rd.GetName(), metav1.GetOptions{})
	lastReconciled := syncedRd.Status.Reconciled
	assert.Truef(t, !lastReconciled.Time.IsZero(), "Reconciled on status should have been set")
	// Update deployment should sync. Only actual updates will be handled by the controller
	noReplicas := 0
	rd.Spec.Components[0].Replicas = &noReplicas
	radixClient.RadixV1().RadixDeployments(rd.ObjectMeta.Namespace).Update(context.TODO(), rd, metav1.UpdateOptions{})
	op, ok = <-synced
	assert.True(t, ok)
	assert.True(t, op)
	syncedRd, _ = radixClient.RadixV1().RadixDeployments(rd.ObjectMeta.Namespace).Get(context.TODO(), rd.GetName(), metav1.GetOptions{})
	assert.Truef(t, !lastReconciled.Time.IsZero(), "Reconciled on status should have been set")
	assert.NotEqual(t, lastReconciled, syncedRd.Status.Reconciled)
	lastReconciled = syncedRd.Status.Reconciled
	// Delete service should sync
	services, _ := client.CoreV1().Services(rd.ObjectMeta.Namespace).List(
		context.TODO(),
		metav1.ListOptions{
			LabelSelector: "radix-app=test-app",
		})
	for _, aservice := range services.Items {
		client.CoreV1().Services(rd.ObjectMeta.Namespace).Delete(context.TODO(), aservice.Name, metav1.DeleteOptions{})
		op, ok = <-synced
		assert.True(t, ok)
		assert.True(t, op)
	}
	syncedRd, _ = radixClient.RadixV1().RadixDeployments(rd.ObjectMeta.Namespace).Get(context.TODO(), rd.GetName(), metav1.GetOptions{})
	assert.Truef(t, !lastReconciled.Time.IsZero(), "Reconciled on status should have been set")
	assert.NotEqual(t, lastReconciled, syncedRd.Status.Reconciled)
	lastReconciled = syncedRd.Status.Reconciled
	teardownTest()
} | explode_data.jsonl/10816 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1135
} | [
2830,
3393,
30674,
920,
5583,
41879,
1155,
353,
8840,
836,
8,
341,
197,
3767,
87994,
1669,
330,
1944,
20023,
698,
197,
3767,
12723,
1669,
330,
15445,
1837,
197,
322,
18626,
198,
3244,
84,
11,
2943,
11,
80958,
2742,
11,
75658,
2959,
11... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestSamplerConcurrent hammers a sampled core from several goroutines
// for a fixed number of ticks and checks that the number of admitted
// logs lands within 10% of the per-tick sampling budget
// (numMessages * logsPerTick * numTicks).
func TestSamplerConcurrent(t *testing.T) {
	const (
		logsPerTick   = 10
		numMessages   = 5
		numTicks      = 25
		numGoroutines = 10
		expectedCount = numMessages * logsPerTick * numTicks
	)
	tick := zaptest.Timeout(10 * time.Millisecond)
	cc := &countingCore{}
	sampler := NewSampler(cc, tick, logsPerTick, 100000)
	var (
		done atomic.Bool
		wg   sync.WaitGroup
	)
	for i := 0; i < numGoroutines; i++ {
		wg.Add(1)
		go func(i int) {
			defer wg.Done()
			for {
				// Stop flag is flipped by the AfterFunc below.
				if done.Load() {
					return
				}
				msg := fmt.Sprintf("msg%v", i%numMessages)
				ent := Entry{Level: DebugLevel, Message: msg, Time: time.Now()}
				if ce := sampler.Check(ent, nil); ce != nil {
					ce.Write()
				}
				// Give a chance for other goroutines to run.
				time.Sleep(time.Microsecond)
			}
		}(i)
	}
	time.AfterFunc(numTicks*tick, func() {
		done.Store(true)
	})
	wg.Wait()
	// Sampling is probabilistic under concurrency; allow a 10% margin.
	assert.InDelta(
		t,
		expectedCount,
		cc.logs.Load(),
		expectedCount/10,
		"Unexpected number of logs",
	)
} | explode_data.jsonl/39115 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 455
} | [
2830,
3393,
66048,
1109,
3231,
1155,
353,
8840,
836,
8,
341,
4777,
2399,
197,
6725,
82,
3889,
22213,
256,
284,
220,
16,
15,
198,
197,
22431,
15820,
256,
284,
220,
20,
198,
197,
22431,
43784,
414,
284,
220,
17,
20,
198,
197,
22431,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
// TestEditAddedFilmCancelCMD drives the EditAddedFilm dialog up to the
// rename prompt, then sends the cancel command and verifies the bot
// answers with the cancellation message and the film record in the
// test database is left unchanged.
func TestEditAddedFilmCancelCMD(t *testing.T) {
	testTelegramClientInst, answerChan, setUpConn := NewTestMovieBot("./test_data/test_data.sql")
	updates := make(chan tgbotapi.Update)
	go testTelegramClientInst.EditAddedFilm(updates)
	// Empty text triggers the "which film?" prompt.
	updates <- tgbotapi.Update{Message: &tgbotapi.Message{Text: "", Chat: &tgbotapi.Chat{ID: 100}}}
	answer := <-answerChan
	expectedAnswer := "Напишите название редактируемого фильма."
	if answer != expectedAnswer {
		t.Errorf(fmt.Sprintf("Not expected bot answer: %s, expected: %s", answer, expectedAnswer))
		return
	}
	searchedFims := "Film1"
	updates <- tgbotapi.Update{Message: &tgbotapi.Message{Text: searchedFims, Chat: &tgbotapi.Chat{ID: 100}}}
	answer = <-answerChan
	expectedAnswer = fmt.Sprintf("Введите исправленное название для фильма %s", searchedFims)
	// Snapshot the film row before the cancel for the later comparison.
	firstDbCheck, err := getSimpleFilm(setUpConn, searchedFims)
	if err != nil {
		t.Errorf(fmt.Sprintf("Can't get film %s from db: %s", searchedFims, err.Error()))
		return
	}
	if answer != expectedAnswer {
		t.Errorf(fmt.Sprintf("Not expected bot answer: %s, expected: %s", answer, expectedAnswer))
		return
	}
	// Send the cancel command instead of a new title.
	updates <- tgbotapi.Update{Message: &tgbotapi.Message{Text: "Отмена", Chat: &tgbotapi.Chat{ID: 100}}}
	answer = <-answerChan
	expectedAnswer = "Команда отменена"
	if answer != expectedAnswer {
		t.Errorf(fmt.Sprintf("Not expected bot answer: %s, expected: %s", answer, expectedAnswer))
		return
	}
	secondDbCheck, err := getSimpleFilm(setUpConn, searchedFims)
	if err != nil {
		t.Errorf(fmt.Sprintf("Can't get film %s from db: %s", searchedFims, err.Error()))
		return
	}
	// The cancelled edit must not have modified the stored record.
	if firstDbCheck.TelegramId != secondDbCheck.TelegramId || firstDbCheck.ID != secondDbCheck.ID || secondDbCheck.Label != searchedFims {
		t.Errorf(fmt.Sprintf("Wrong DB result, old tg_id: %d, new tg_id: %d, old id: %d, new id: %d, old label: %s, new label: %s", firstDbCheck.TelegramId, secondDbCheck.TelegramId, firstDbCheck.ID, secondDbCheck.ID, firstDbCheck.Label, secondDbCheck.Label))
		return
	}
	t.Logf("TestEditAddedFilmCancelCMD complete")
} | explode_data.jsonl/19232 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 819
} | [
2830,
3393,
4036,
19337,
51487,
9269,
38680,
1155,
353,
8840,
836,
8,
341,
18185,
72244,
2959,
8724,
11,
4226,
46019,
11,
18620,
9701,
1669,
1532,
2271,
19668,
23502,
13988,
1944,
1769,
12697,
1769,
10045,
1138,
197,
49661,
1669,
1281,
35... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 9 |
// TestIxSyncer checks the Ix usersync configuration: with empty GDPR
// values the {{gdpr}}/{{gdpr_consent}} macros resolve to empty strings,
// the sync type is "redirect", the GDPR vendor ID is 10, and CORS is
// not supported.
func TestIxSyncer(t *testing.T) {
	syncer := NewIxSyncer(&config.Configuration{ExternalURL: "localhost", Adapters: map[string]config.Adapter{
		string(openrtb_ext.BidderIx): {
			UserSyncURL: "//ssum-sec.casalemedia.com/usermatchredir?s=184932&cb=localhost%2Fsetuid%3Fbidder%3Dix%26gdpr%3D{{gdpr}}%26gdpr_consent%3D{{gdpr_consent}}%26uid%3D",
		},
	}})
	u := syncer.GetUsersyncInfo("", "")
	assert.Equal(t, "//ssum-sec.casalemedia.com/usermatchredir?s=184932&cb=localhost%2Fsetuid%3Fbidder%3Dix%26gdpr%3D%26gdpr_consent%3D%26uid%3D", u.URL)
	assert.Equal(t, "redirect", u.Type)
	assert.Equal(t, uint16(10), syncer.GDPRVendorID())
	assert.Equal(t, false, u.SupportCORS)
} | explode_data.jsonl/78449 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 310
} | [
2830,
3393,
96112,
12154,
261,
1155,
353,
8840,
836,
8,
341,
1903,
1721,
261,
1669,
1532,
96112,
12154,
261,
2099,
1676,
17334,
90,
25913,
3144,
25,
330,
8301,
497,
2410,
17425,
25,
2415,
14032,
60,
1676,
34190,
515,
197,
11357,
30981,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestGenerateKey checks that ed25519.GenerateKey is a pure function of
// its entropy source: identical seeds yield equal key pairs, different
// seeds yield different pairs, and insufficient entropy returns an error.
func TestGenerateKey(t *testing.T) {
	public, private, err := ed25519.GenerateKey(bytes.NewReader(nullSeed))
	require.NoError(t, err)
	assert.Equalf(t, public, private.Public(), "private.Public() is not Equal to public")
	// calling GenerateKey with the same entropy should return the same keys
	pubClone, privClone, _ := ed25519.GenerateKey(bytes.NewReader(nullSeed))
	assert.Truef(t, private.Equal(privClone), "private keys are not equal")
	assert.Truef(t, public.Equal(pubClone), "public keys are not equal")
	// calling ed25519.GenerateKey with different entropy must lead to different keys
	pubOther, privOther, _ := ed25519.GenerateKey(bytes.NewReader((&[32]byte{1})[:]))
	assert.False(t, public.Equal(pubOther), "private keys are equal")
	assert.False(t, private.Equal(privOther), "public keys are equal")
	// An empty reader cannot supply the 32 seed bytes GenerateKey reads.
	_, _, err = ed25519.GenerateKey(new(bytes.Reader))
	assert.Errorf(t, err, "calling GenerateKey(rand) from insufficient entropy is invalid")
} | explode_data.jsonl/55566 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 316
} | [
2830,
3393,
31115,
1592,
1155,
353,
8840,
836,
8,
341,
1219,
11,
869,
11,
1848,
1669,
1578,
17,
20,
20,
16,
24,
57582,
1592,
23158,
68587,
4967,
41471,
1171,
17957,
35699,
1155,
11,
1848,
340,
6948,
12808,
69,
1155,
11,
584,
11,
869... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestGetRealExecName(t *testing.T) {
a := []struct {
key string
realkey string
}{
{"coins", "coins"},
{"user.p.coins", "user.p.coins"},
{"user.p.guodun.coins", "coins"},
{"user.evm.hash", "evm"},
{"user.p.para.evm.hash", "evm.hash"},
{"user.p.para.user.evm.hash", "evm"},
{"user.p.para.", "user.p.para."},
}
for _, v := range a {
assert.Equal(t, string(GetRealExecName([]byte(v.key))), v.realkey)
}
} | explode_data.jsonl/58327 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 209
} | [
2830,
3393,
1949,
12768,
10216,
675,
1155,
353,
8840,
836,
8,
341,
11323,
1669,
3056,
1235,
341,
197,
23634,
257,
914,
198,
197,
91874,
792,
914,
198,
197,
59403,
197,
197,
4913,
29609,
497,
330,
29609,
7115,
197,
197,
4913,
872,
556,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestApiServerLeaseEndpointReconcilerDisabled(t *testing.T) {
testCases := []string{
`
kubernetesVersion: v1.7.16
`,
`
kubernetesVersion: v1.8.12
`}
for _, testCase := range testCases {
confBody := singleAzConfigYaml + testCase
c, _ := ClusterFromBytes([]byte(confBody))
if enabled, err := c.ApiServerLeaseEndpointReconciler(); enabled == true || err != nil {
t.Errorf("API server lease endpoint should not be enabled prior to Kubernetes 1.9: %s\n%s", err, confBody)
}
}
} | explode_data.jsonl/4380 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 193
} | [
2830,
3393,
6563,
5475,
2304,
519,
27380,
693,
40446,
5769,
25907,
1155,
353,
8840,
836,
8,
341,
18185,
37302,
1669,
3056,
917,
515,
197,
197,
3989,
74,
29827,
5637,
25,
348,
16,
13,
22,
13,
16,
21,
198,
12892,
197,
197,
3989,
74,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestValidBasic(t *testing.T) {
testvalid(t, "0", true)
testvalid(t, "00", false)
testvalid(t, "-00", false)
testvalid(t, "-.", false)
testvalid(t, "-.123", false)
testvalid(t, "0.0", true)
testvalid(t, "10.0", true)
testvalid(t, "10e1", true)
testvalid(t, "10EE", false)
testvalid(t, "10E-", false)
testvalid(t, "10E+", false)
testvalid(t, "10E123", true)
testvalid(t, "10E-123", true)
testvalid(t, "10E-0123", true)
testvalid(t, "", false)
testvalid(t, " ", false)
testvalid(t, "{}", true)
testvalid(t, "{", false)
testvalid(t, "-", false)
testvalid(t, "-1", true)
testvalid(t, "-1.", false)
testvalid(t, "-1.0", true)
testvalid(t, " -1.0", true)
testvalid(t, " -1.0 ", true)
testvalid(t, "-1.0 ", true)
testvalid(t, "-1.0 i", false)
testvalid(t, "-1.0 i", false)
testvalid(t, "true", true)
testvalid(t, " true", true)
testvalid(t, " true ", true)
testvalid(t, " True ", false)
testvalid(t, " tru", false)
testvalid(t, "false", true)
testvalid(t, " false", true)
testvalid(t, " false ", true)
testvalid(t, " False ", false)
testvalid(t, " fals", false)
testvalid(t, "null", true)
testvalid(t, " null", true)
testvalid(t, " null ", true)
testvalid(t, " Null ", false)
testvalid(t, " nul", false)
testvalid(t, " []", true)
testvalid(t, " [true]", true)
testvalid(t, " [ true, null ]", true)
testvalid(t, " [ true,]", false)
testvalid(t, `{"hello":"world"}`, true)
testvalid(t, `{ "hello": "world" }`, true)
testvalid(t, `{ "hello": "world", }`, false)
testvalid(t, `{"a":"b",}`, false)
testvalid(t, `{"a":"b","a"}`, false)
testvalid(t, `{"a":"b","a":}`, false)
testvalid(t, `{"a":"b","a":1}`, true)
testvalid(t, `{"a":"b",2"1":2}`, false)
testvalid(t, `{"a":"b","a": 1, "c":{"hi":"there"} }`, true)
testvalid(t, `{"a":"b","a": 1, "c":{"hi":"there", "easy":["going",`+
`{"mixed":"bag"}]} }`, true)
testvalid(t, `""`, true)
testvalid(t, `"`, false)
testvalid(t, `"\n"`, true)
testvalid(t, `"\"`, false)
testvalid(t, `"\\"`, true)
testvalid(t, `"a\\b"`, true)
testvalid(t, `"a\\b\\\"a"`, true)
testvalid(t, `"a\\b\\\uFFAAa"`, true)
testvalid(t, `"a\\b\\\uFFAZa"`, false)
testvalid(t, `"a\\b\\\uFFA"`, false)
testvalid(t, string(complicatedJSON), true)
testvalid(t, string(exampleJSON), true)
testvalid(t, "[-]", false)
testvalid(t, "[-.123]", false)
} | explode_data.jsonl/43443 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1054
} | [
2830,
3393,
4088,
15944,
1155,
353,
8840,
836,
8,
341,
18185,
1891,
1155,
11,
330,
15,
497,
830,
340,
18185,
1891,
1155,
11,
330,
15,
15,
497,
895,
340,
18185,
1891,
1155,
11,
6523,
15,
15,
497,
895,
340,
18185,
1891,
1155,
11,
65... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestRunsLatestWithtoutError(t *testing.T) {
logs := []string{}
config := &Config{
args: []string{".binary", "latest"},
logf: func(f string, a ...interface{}) {
t.Logf(f+"\n", a...)
logs = append(logs, fmt.Sprintf(f, a...))
},
puzzles: testPuzzles(),
}
err := Run(config)
assert.NoError(t, err)
assert.Contains(t, logs, "There are 3 solutions")
assert.NotContains(t, logs, "000 1 Init")
assert.NotContains(t, logs, "000 1.1 - 1")
assert.NotContains(t, logs, "000 1.2 - 2")
assert.NotContains(t, logs, "000 1.3 - 3")
assert.NotContains(t, logs, "000 1 Tidy")
assert.NotContains(t, logs, "000 2.1 - 4")
assert.NotContains(t, logs, "000 2.2 - 5")
assert.NotContains(t, logs, "000 2.3 - 6")
assert.NotContains(t, logs, "000 2 Tidy")
assert.Contains(t, logs, "000 3 Init")
assert.Contains(t, logs, "000 3.1 - 7")
assert.Contains(t, logs, "000 3.2 - 8")
assert.Contains(t, logs, "000 3.3 - 9")
} | explode_data.jsonl/67961 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 432
} | [
2830,
3393,
73920,
31992,
2354,
66333,
1454,
1155,
353,
8840,
836,
8,
341,
6725,
82,
1669,
3056,
917,
16094,
25873,
1669,
609,
2648,
515,
197,
31215,
25,
3056,
917,
90,
3263,
25891,
497,
330,
19350,
7115,
197,
6725,
69,
25,
2915,
955,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestOutputService9ProtocolTestSupportsHeaderMapsCase1(t *testing.T) {
svc := NewOutputService9ProtocolTest(nil)
buf := bytes.NewReader([]byte("{}"))
req, out := svc.OutputService9TestCaseOperation1Request(nil)
req.HTTPResponse = &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
// set headers
req.HTTPResponse.Header.Set("Content-Length", "10")
req.HTTPResponse.Header.Set("X-bam", "boo")
req.HTTPResponse.Header.Set("x-Foo", "bar")
// unmarshal response
restjson.UnmarshalMeta(req)
restjson.Unmarshal(req)
assert.NoError(t, req.Error)
// assert response
assert.NotNil(t, out) // ensure out variable is used
assert.Equal(t, "10", *out.AllHeaders["Content-Length"])
assert.Equal(t, "boo", *out.AllHeaders["X-Bam"])
assert.Equal(t, "bar", *out.AllHeaders["X-Foo"])
assert.Equal(t, "boo", *out.PrefixedHeaders["Bam"])
assert.Equal(t, "bar", *out.PrefixedHeaders["Foo"])
} | explode_data.jsonl/8447 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 378
} | [
2830,
3393,
5097,
1860,
24,
20689,
2271,
7916,
82,
4047,
36562,
4207,
16,
1155,
353,
8840,
836,
8,
341,
1903,
7362,
1669,
1532,
5097,
1860,
24,
20689,
2271,
27907,
692,
26398,
1669,
5820,
68587,
10556,
3782,
53430,
5455,
24395,
11,
700,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestClient(t *testing.T) {
opts := &redis.Options{Addr: "127.0.0.1:6379"}
assert := assert.New(t)
mt := mocktracer.Start()
defer mt.Stop()
client := NewClient(opts, WithServiceName("my-redis"))
client.Set("test_key", "test_value", 0)
spans := mt.FinishedSpans()
assert.Len(spans, 1)
span := spans[0]
assert.Equal("redis.command", span.OperationName())
assert.Equal(ext.SpanTypeRedis, span.Tag(ext.SpanType))
assert.Equal("my-redis", span.Tag(ext.ServiceName))
assert.Equal("127.0.0.1", span.Tag(ext.TargetHost))
assert.Equal("6379", span.Tag(ext.TargetPort))
assert.Equal("set test_key test_value: ", span.Tag("redis.raw_command"))
assert.Equal("3", span.Tag("redis.args_length"))
} | explode_data.jsonl/43235 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 287
} | [
2830,
3393,
2959,
1155,
353,
8840,
836,
8,
341,
64734,
1669,
609,
21748,
22179,
90,
13986,
25,
330,
16,
17,
22,
13,
15,
13,
15,
13,
16,
25,
21,
18,
22,
24,
16707,
6948,
1669,
2060,
7121,
1155,
340,
2109,
83,
1669,
7860,
94941,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestEndpointsList(t *testing.T) {
client, err := clients.NewIdentityV3Client()
if err != nil {
t.Fatalf("Unable to obtain an identity client: %v")
}
allPages, err := endpoints.List(client, nil).AllPages()
if err != nil {
t.Fatalf("Unable to list endpoints: %v", err)
}
allEndpoints, err := endpoints.ExtractEndpoints(allPages)
if err != nil {
t.Fatalf("Unable to extract endpoints: %v", err)
}
for _, endpoint := range allEndpoints {
tools.PrintResource(t, endpoint)
}
} | explode_data.jsonl/20571 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 206
} | [
2830,
3393,
80786,
852,
1155,
353,
8840,
836,
8,
972,
25291,
11,
1848,
1669,
8239,
7121,
18558,
53,
18,
2959,
3568,
743,
1848,
961,
2092,
972,
197,
3244,
30762,
445,
17075,
311,
6851,
458,
9569,
2943,
25,
1018,
85,
6060,
197,
2570,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestData(t *testing.T) {
mt := NewMockTerminal(1)
defer mt.Close()
testData := "hello\nworld\n"
n, err := mt.Write([]byte(testData))
if err != nil {
t.Errorf("unexpected error %v", err)
return
}
if n != len(testData) {
t.Errorf("wanted to write %d bytes, wrote %d bytes", len(testData), n)
return
}
recorded := mt.RecordedData()
if string(recorded) != testData {
t.Errorf("unexpected recorded data %v", recorded)
return
}
} | explode_data.jsonl/80613 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 183
} | [
2830,
93200,
1155,
353,
8840,
836,
8,
341,
2109,
83,
1669,
1532,
11571,
47890,
7,
16,
340,
16867,
11965,
10421,
2822,
18185,
1043,
1669,
330,
14990,
1699,
14615,
1699,
698,
9038,
11,
1848,
1669,
11965,
4073,
10556,
3782,
8623,
1043,
117... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestBitfinex_GetDepth(t *testing.T) {
dep, _ := bfx.GetDepth(2, goex.ETH_BTC)
t.Log(dep.AskList)
t.Log(dep.BidList)
} | explode_data.jsonl/22716 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 62
} | [
2830,
3393,
8344,
5472,
327,
13614,
19776,
1155,
353,
8840,
836,
8,
341,
197,
14891,
11,
716,
1669,
293,
8298,
2234,
19776,
7,
17,
11,
728,
327,
13,
7625,
1668,
7749,
340,
3244,
5247,
78772,
875,
4886,
852,
340,
3244,
5247,
78772,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestStream_PulsarMsgStream_Search(t *testing.T) {
pulsarAddress, _ := Params.Load("_PulsarAddress")
c := funcutil.RandomString(8)
producerChannels := []string{c}
consumerChannels := []string{c}
consumerSubName := funcutil.RandomString(8)
msgPack := MsgPack{}
msgPack.Msgs = append(msgPack.Msgs, getTsMsg(commonpb.MsgType_Search, 1))
msgPack.Msgs = append(msgPack.Msgs, getTsMsg(commonpb.MsgType_Search, 3))
inputStream := getPulsarInputStream(pulsarAddress, producerChannels)
outputStream := getPulsarOutputStream(pulsarAddress, consumerChannels, consumerSubName)
err := inputStream.Produce(&msgPack)
if err != nil {
log.Fatalf("produce error = %v", err)
}
receiveMsg(outputStream, len(msgPack.Msgs))
inputStream.Close()
outputStream.Close()
} | explode_data.jsonl/55288 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 279
} | [
2830,
3393,
3027,
1088,
14295,
277,
6611,
3027,
65913,
1155,
353,
8840,
836,
8,
341,
3223,
14295,
277,
4286,
11,
716,
1669,
34352,
13969,
16975,
47,
14295,
277,
4286,
1138,
1444,
1669,
2915,
1314,
26709,
703,
7,
23,
340,
197,
58912,
3... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestGenerateModels(t *testing.T) {
t.Parallel()
defer discardOutput()()
cwd := testCwd(t)
const root = "generated_models"
defer func() {
_ = os.RemoveAll(filepath.Join(cwd, root))
}()
t.Run("generate models", func(t *testing.T) {
cases := map[string]generateFixture{
"allDefinitions": {
spec: "../fixtures/bugs/1042/fixture-1042.yaml",
target: "../fixtures/bugs/1042",
verify: func(t testing.TB, target string) {
target = filepath.Join(target, defaultModelsTarget)
require.True(t, fileExists(target, ""))
assert.True(t, fileExists(target, "a.go"))
assert.True(t, fileExists(target, "b.go"))
},
},
"acceptDefinitions": {
spec: "../fixtures/enhancements/2333/fixture-definitions.yaml",
target: "../fixtures/enhancements/2333",
prepare: func(_ testing.TB, opts *GenOpts) {
opts.AcceptDefinitionsOnly = true
},
verify: func(t testing.TB, target string) {
target = filepath.Join(target, defaultModelsTarget)
require.True(t, fileExists(target, ""))
assert.True(t, fileExists(target, "model_interface.go"))
assert.True(t, fileExists(target, "records_model.go"))
assert.True(t, fileExists(target, "records_model_with_max.go"))
assert.False(t, fileExists(target, "restapi"))
},
},
}
for k, cas := range cases {
name := k
thisCas := cas
t.Run(name, func(t *testing.T) {
t.Parallel()
defer thisCas.warnFailed(t)
opts := testGenOpts()
defer thisCas.prepareTarget(t, name, "model_test", root, opts)()
if thisCas.prepare != nil {
thisCas.prepare(t, opts)
}
t.Logf("generating test models at: %s", opts.Target)
err := GenerateModels([]string{"", ""}, opts) // NOTE: generate all models, ignore ""
if thisCas.wantError {
require.Errorf(t, err, "expected an error for models build fixture: %s", opts.Spec)
} else {
require.NoError(t, err, "unexpected error for models build fixture: %s", opts.Spec)
}
if thisCas.verify != nil {
thisCas.verify(t, opts.Target)
}
})
}
})
} | explode_data.jsonl/2577 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 911
} | [
2830,
3393,
31115,
16969,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
16867,
37867,
5097,
368,
2822,
1444,
6377,
1669,
1273,
34,
6377,
1155,
340,
4777,
3704,
284,
330,
16187,
30792,
698,
16867,
2915,
368,
341,
197,
197,
62,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestGetAccountWithdrawalFeeForCurrency(t *testing.T) {
resp, err := o.GetAccountWithdrawalFee(currency.BTC.String())
if areTestAPIKeysSet() {
if err != nil {
t.Error(err)
}
if len(resp) != 1 {
t.Error("Expected fee for one currency")
}
} else if !areTestAPIKeysSet() && err == nil {
t.Error("Expecting an error when no keys are set")
}
} | explode_data.jsonl/30144 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 143
} | [
2830,
3393,
1949,
7365,
92261,
278,
41941,
2461,
26321,
1155,
353,
8840,
836,
8,
341,
34653,
11,
1848,
1669,
297,
2234,
7365,
92261,
278,
41941,
90475,
1785,
7749,
6431,
2398,
743,
525,
2271,
7082,
8850,
1649,
368,
341,
197,
743,
1848,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
func TestWorkerBasics(t *testing.T) {
pool := newTestPool(":6379")
ns := "work"
job1 := "job1"
job2 := "job2"
job3 := "job3"
cleanKeyspace(ns, pool)
var arg1 float64
var arg2 float64
var arg3 float64
jobTypes := make(map[string]*jobType)
jobTypes[job1] = &jobType{
Name: job1,
JobOptions: JobOptions{Priority: 1},
IsGeneric: true,
GenericHandler: func(job *Job) error {
arg1 = job.Args["a"].(float64)
return nil
},
}
jobTypes[job2] = &jobType{
Name: job2,
JobOptions: JobOptions{Priority: 1},
IsGeneric: true,
GenericHandler: func(job *Job) error {
arg2 = job.Args["a"].(float64)
return nil
},
}
jobTypes[job3] = &jobType{
Name: job3,
JobOptions: JobOptions{Priority: 1},
IsGeneric: true,
GenericHandler: func(job *Job) error {
arg3 = job.Args["a"].(float64)
return nil
},
}
enqueuer := NewEnqueuer(ns, pool)
_, err := enqueuer.Enqueue(job1, Q{"a": 1})
assert.Nil(t, err)
_, err = enqueuer.Enqueue(job2, Q{"a": 2})
assert.Nil(t, err)
_, err = enqueuer.Enqueue(job3, Q{"a": 3})
assert.Nil(t, err)
w := newWorker(ns, "1", pool, tstCtxType, nil, jobTypes, nil)
w.start()
w.drain()
w.stop()
// make sure the jobs ran (side effect of setting these variables to the job arguments)
assert.EqualValues(t, 1.0, arg1)
assert.EqualValues(t, 2.0, arg2)
assert.EqualValues(t, 3.0, arg3)
// nothing in retries or dead
assert.EqualValues(t, 0, zsetSize(pool, redisKeyRetry(ns)))
assert.EqualValues(t, 0, zsetSize(pool, redisKeyDead(ns)))
// Nothing in the queues or in-progress queues
assert.EqualValues(t, 0, listSize(pool, redisKeyJobs(ns, job1)))
assert.EqualValues(t, 0, listSize(pool, redisKeyJobs(ns, job2)))
assert.EqualValues(t, 0, listSize(pool, redisKeyJobs(ns, job3)))
assert.EqualValues(t, 0, listSize(pool, redisKeyJobsInProgress(ns, "1", job1)))
assert.EqualValues(t, 0, listSize(pool, redisKeyJobsInProgress(ns, "1", job2)))
assert.EqualValues(t, 0, listSize(pool, redisKeyJobsInProgress(ns, "1", job3)))
// nothing in the worker status
h := readHash(pool, redisKeyWorkerObservation(ns, w.workerID))
assert.EqualValues(t, 0, len(h))
} | explode_data.jsonl/35595 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 928
} | [
2830,
3393,
21936,
33603,
1211,
1155,
353,
8840,
836,
8,
341,
85273,
1669,
501,
2271,
10551,
18893,
21,
18,
22,
24,
1138,
84041,
1669,
330,
1778,
698,
68577,
16,
1669,
330,
8799,
16,
698,
68577,
17,
1669,
330,
8799,
17,
698,
68577,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestDirMgrCollection_AddDirMgrCollection_02(t *testing.T) {
dMgrs1 := DirMgrCollection{}
dMgrs1.dirMgrs = nil
dMgrs2 := DirMgrCollection{}
dMgrs2.dirMgrs = nil
dMgrs1.AddDirMgrCollection(&dMgrs2)
if dMgrs1.dirMgrs == nil {
t.Error("ERROR: dMgrs1.dirMgrs == nil !!!\n")
return
}
if dMgrs2.dirMgrs == nil {
t.Error("ERROR: dMgrs2.dirMgrs == nil !!!\n")
return
}
if len(dMgrs1.dirMgrs) != 0 {
t.Errorf("ERROR: Expected len(dMgrs1.dirMgrs) == '0'.\n" +
"Instead, len(dMgrs1.dirMgrs) == '%v'", len(dMgrs1.dirMgrs))
}
} | explode_data.jsonl/61224 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 300
} | [
2830,
3393,
6184,
25567,
6482,
21346,
6184,
25567,
6482,
62,
15,
17,
1155,
353,
8840,
836,
8,
1476,
220,
294,
25567,
82,
16,
1669,
30094,
25567,
6482,
16094,
220,
294,
25567,
82,
16,
14395,
25567,
82,
284,
2092,
271,
220,
294,
25567,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestRunOnce(t *testing.T) {
namespaces := []*v1.Namespace{
{
ObjectMeta: metav1.ObjectMeta{
Name: "default",
},
},
}
controller := &PDBController{
Interface: setupMockKubernetes(t, nil, nil, nil, namespaces, nil),
}
err := controller.runOnce()
if err != nil {
t.Error(err)
}
} | explode_data.jsonl/53281 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 134
} | [
2830,
3393,
6727,
12522,
1155,
353,
8840,
836,
8,
341,
93940,
27338,
1669,
29838,
85,
16,
46011,
515,
197,
197,
515,
298,
23816,
12175,
25,
77520,
16,
80222,
515,
571,
21297,
25,
330,
2258,
756,
298,
197,
1583,
197,
197,
1583,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestCollection_ReplaceOne(t *testing.T) {
mgoClient := Ins()
result, err := mgoClient.C("test").ReplaceOne(bson.M{"name": "sheldon"}, bson.M{"name": "lily", "age": 18})
if err != nil {
t.Errorf("ReplaceOne error: %s", err)
t.FailNow()
}
t.Log("ReplaceOne ok: ", result)
} | explode_data.jsonl/30106 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 120
} | [
2830,
3393,
6482,
62,
23107,
3966,
1155,
353,
8840,
836,
8,
341,
2109,
3346,
2959,
1669,
9726,
2822,
9559,
11,
1848,
1669,
296,
3346,
2959,
727,
445,
1944,
1827,
23107,
3966,
1883,
930,
1321,
4913,
606,
788,
330,
927,
55043,
14345,
50... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestICMPFilter(t *testing.T) {
switch runtime.GOOS {
case "linux":
default:
t.Skipf("not supported on %s", runtime.GOOS)
}
var f ipv4.ICMPFilter
for _, toggle := range []bool{false, true} {
f.SetAll(toggle)
for _, typ := range []ipv4.ICMPType{
ipv4.ICMPTypeDestinationUnreachable,
ipv4.ICMPTypeEchoReply,
ipv4.ICMPTypeTimeExceeded,
ipv4.ICMPTypeParameterProblem,
} {
f.Accept(typ)
if f.WillBlock(typ) {
t.Errorf("ipv4.ICMPFilter.Set(%v, false) failed", typ)
}
f.Block(typ)
if !f.WillBlock(typ) {
t.Errorf("ipv4.ICMPFilter.Set(%v, true) failed", typ)
}
}
}
} | explode_data.jsonl/55241 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 299
} | [
2830,
3393,
1317,
5781,
5632,
1155,
353,
8840,
836,
8,
341,
8961,
15592,
97574,
3126,
341,
2722,
330,
14210,
4660,
11940,
510,
197,
3244,
57776,
69,
445,
1921,
7248,
389,
1018,
82,
497,
15592,
97574,
3126,
340,
197,
630,
2405,
282,
45... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
func Test0(t *testing.T) {
p := []int{1, 2, 3}
c := []int{0, 1, 1}
assert.Equal(t, 4, findMaximizedCapital(2, 0, p, c))
} | explode_data.jsonl/7664 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 64
} | [
2830,
3393,
15,
1155,
353,
8840,
836,
8,
341,
3223,
1669,
3056,
396,
90,
16,
11,
220,
17,
11,
220,
18,
532,
1444,
1669,
3056,
396,
90,
15,
11,
220,
16,
11,
220,
16,
532,
6948,
12808,
1155,
11,
220,
19,
11,
1477,
5974,
45706,
6... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestAPIGetGitHook(t *testing.T) {
defer prepareTestEnv(t)()
repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 37}).(*models.Repository)
owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User)
// user1 is an admin user
session := loginUser(t, "user1")
token := getTokenForLoggedInUser(t, session)
req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/hooks/git/pre-receive?token=%s",
owner.Name, repo.Name, token)
resp := MakeRequest(t, req, http.StatusOK)
var apiGitHook *api.GitHook
DecodeJSON(t, resp, &apiGitHook)
assert.True(t, apiGitHook.IsActive)
assert.Equal(t, testHookContent, apiGitHook.Content)
} | explode_data.jsonl/32982 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 282
} | [
2830,
3393,
7082,
1949,
46562,
31679,
1155,
353,
8840,
836,
8,
341,
16867,
10549,
2271,
14359,
1155,
8,
2822,
17200,
5368,
1669,
4119,
11711,
15575,
3036,
5879,
10437,
1155,
11,
609,
6507,
25170,
90,
915,
25,
220,
18,
22,
16630,
4071,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestProtodesc_GetMethodDescFromProto(t *testing.T) {
t.Run("invalid path", func(t *testing.T) {
md, err := GetMethodDescFromProto("pkg.Call", "invalid.proto", []string{})
assert.Error(t, err)
assert.Nil(t, md)
})
t.Run("invalid call symbol", func(t *testing.T) {
md, err := GetMethodDescFromProto("pkg.Call", "../testdata/greeter.proto", []string{})
assert.Error(t, err)
assert.Nil(t, md)
})
t.Run("invalid package", func(t *testing.T) {
md, err := GetMethodDescFromProto("helloworld.pkg.SayHello", "../testdata/greeter.proto", []string{})
assert.Error(t, err)
assert.Nil(t, md)
})
t.Run("invalid method", func(t *testing.T) {
md, err := GetMethodDescFromProto("helloworld.Greeter.Foo", "../testdata/greeter.proto", []string{})
assert.Error(t, err)
assert.Nil(t, md)
})
t.Run("valid symbol", func(t *testing.T) {
md, err := GetMethodDescFromProto("helloworld.Greeter.SayHello", "../testdata/greeter.proto", []string{})
assert.NoError(t, err)
assert.NotNil(t, md)
})
t.Run("valid symbol slashes", func(t *testing.T) {
md, err := GetMethodDescFromProto("helloworld.Greeter/SayHello", "../testdata/greeter.proto", []string{})
assert.NoError(t, err)
assert.NotNil(t, md)
})
t.Run("proto3 optional support", func(t *testing.T) {
md, err := GetMethodDescFromProto("helloworld.OptionalGreeter/SayHello", "../testdata/optional.proto", []string{})
assert.NoError(t, err)
assert.NotNil(t, md)
})
} | explode_data.jsonl/10864 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 602
} | [
2830,
3393,
12423,
2539,
66,
13614,
3523,
11065,
3830,
31549,
1155,
353,
8840,
836,
8,
341,
3244,
16708,
445,
11808,
1815,
497,
2915,
1155,
353,
8840,
836,
8,
341,
197,
84374,
11,
1848,
1669,
2126,
3523,
11065,
3830,
31549,
445,
30069,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestFormatFuncs(t *testing.T) {
cases := []struct {
description string
funcName string
input string
expected string
}{
{
description: "snake",
funcName: "snake",
input: "SomethingCool",
expected: "something_cool",
},
{
description: "camel",
funcName: "camel",
input: "something_cool",
expected: "somethingCool",
},
{
description: "upper",
funcName: "upper",
input: "IDontKnow",
expected: "IDONTKNOW",
},
{
description: "lower",
funcName: "lower",
input: "ThisTest",
expected: "thistest",
},
{
description: "first (keep upper)",
funcName: "first",
input: "FirstLetter",
expected: "F",
},
{
description: "first (keep lower)",
funcName: "first",
input: "firstLetter",
expected: "f",
},
{
description: "first upper",
funcName: "first-upper",
input: "firstLetter",
expected: "F",
},
{
description: "first lower",
funcName: "first-lower",
input: "FirstLetter",
expected: "f",
},
{
description: "capitalize first letter",
funcName: "capitalize-first",
input: "TheFirstLetterIsUpper",
expected: "The first letter is upper",
},
{
description: "capitalize all letter letters",
funcName: "capitalize-all",
input: "TheFirstLetterIsUpper",
expected: "The First Letter Is Upper",
},
}
funcs := FormatFuncs()
for _, tc := range cases {
t.Run(tc.description, func(t *testing.T) {
f, ok := funcs[tc.funcName]
require.True(t, ok)
assert.Equal(t, tc.expected, f(tc.input))
})
}
} | explode_data.jsonl/38387 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 791
} | [
2830,
3393,
4061,
9626,
82,
1155,
353,
8840,
836,
8,
341,
1444,
2264,
1669,
3056,
1235,
341,
197,
42407,
914,
198,
197,
29244,
675,
262,
914,
198,
197,
22427,
981,
914,
198,
197,
42400,
262,
914,
198,
197,
59403,
197,
197,
515,
298,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestGatherTokenMetric(t *testing.T) {
GatherTokenMetric("BNB", "0xB8c77482e45F1F44dE1745F52C74426C631bDD52", gocron.Every(20).Seconds())
gocron.Start()
time.Sleep(35 * time.Second)
} | explode_data.jsonl/15048 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 90
} | [
2830,
3393,
38,
1856,
3323,
54310,
1155,
353,
8840,
836,
8,
341,
9600,
1856,
3323,
54310,
445,
15594,
33,
497,
330,
15,
14377,
23,
66,
22,
22,
19,
23,
17,
68,
19,
20,
37,
16,
37,
19,
19,
67,
36,
16,
22,
19,
20,
37,
20,
17,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func Test_splitFileName(t *testing.T) {
tests := []struct {
name string
filename string
wantName string
wantVer string
wantRel string
wantErr bool
}{
{
name: "valid name",
filename: "glibc-2.17-307.el7.1.src.rpm",
wantName: "glibc",
wantVer: "2.17",
wantRel: "307.el7.1",
wantErr: false,
},
{
name: "invalid name",
filename: "elasticsearch-5.6.16-1-src.rpm",
wantName: "",
wantVer: "",
wantRel: "",
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
gotName, gotVer, gotRel, err := splitFileName(tt.filename)
if tt.wantErr {
assert.Error(t, err)
} else {
assert.NoError(t, err)
}
assert.Equal(t, tt.wantName, gotName)
assert.Equal(t, tt.wantVer, gotVer)
assert.Equal(t, tt.wantRel, gotRel)
})
}
} | explode_data.jsonl/14266 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 429
} | [
2830,
3393,
17052,
10903,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
11609,
257,
914,
198,
197,
66434,
914,
198,
197,
50780,
675,
914,
198,
197,
50780,
10141,
220,
914,
198,
197,
50780,
6740,
220,
914,
198,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestAccAWSWafSizeConstraintSet_basic(t *testing.T) {
var v waf.SizeConstraintSet
sizeConstraintSet := fmt.Sprintf("sizeConstraintSet-%s", acctest.RandString(5))
resourceName := "aws_waf_size_constraint_set.size_constraint_set"
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t); testAccPreCheckAWSWaf(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckAWSWafSizeConstraintSetDestroy,
Steps: []resource.TestStep{
{
Config: testAccAWSWafSizeConstraintSetConfig(sizeConstraintSet),
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSWafSizeConstraintSetExists(resourceName, &v),
testAccMatchResourceAttrGlobalARN(resourceName, "arn", "waf", regexp.MustCompile(`sizeconstraintset/.+`)),
resource.TestCheckResourceAttr(resourceName, "name", sizeConstraintSet),
resource.TestCheckResourceAttr(resourceName, "size_constraints.#", "1"),
resource.TestCheckTypeSetElemNestedAttrs(resourceName, "size_constraints.*", map[string]string{
"comparison_operator": "EQ",
"field_to_match.#": "1",
"size": "4096",
"text_transformation": "NONE",
}),
resource.TestCheckTypeSetElemNestedAttrs(resourceName, "size_constraints.*.field_to_match.*", map[string]string{
"data": "",
"type": "BODY",
}),
),
},
{
ResourceName: resourceName,
ImportState: true,
ImportStateVerify: true,
},
},
})
} | explode_data.jsonl/45415 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 633
} | [
2830,
3393,
14603,
36136,
54,
2577,
1695,
17890,
1649,
34729,
1155,
353,
8840,
836,
8,
341,
2405,
348,
289,
2577,
2465,
17890,
1649,
198,
13832,
17890,
1649,
1669,
8879,
17305,
445,
2141,
17890,
1649,
11069,
82,
497,
1613,
67880,
2013,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestSubscribeSyncRace(t *testing.T) {
s := RunServerOnPort(TEST_PORT)
defer s.Shutdown()
nc, err := Connect(fmt.Sprintf("localhost:%d", TEST_PORT))
if err != nil {
t.Fatalf("Error on connect: %v", err)
}
defer nc.Close()
go func() {
time.Sleep(time.Millisecond)
nc.Close()
}()
subj := "foo.sync.race"
for i := 0; i < 10000; i++ {
if _, err := nc.SubscribeSync(subj); err != nil {
break
}
if _, err := nc.QueueSubscribeSync(subj, "gc"); err != nil {
break
}
}
} | explode_data.jsonl/44933 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 220
} | [
2830,
3393,
28573,
12154,
55991,
1155,
353,
8840,
836,
8,
341,
1903,
1669,
6452,
5475,
1925,
7084,
50320,
12377,
340,
16867,
274,
10849,
18452,
2822,
197,
1016,
11,
1848,
1669,
13015,
28197,
17305,
445,
8301,
7533,
67,
497,
13602,
12377,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestStreamParser_InvalidXPath(t *testing.T) {
sp, err := CreateStreamParser(strings.NewReader(""), "[invalid")
if err == nil || err.Error() != "invalid streamElementXPath '[invalid', err: expression must evaluate to a node-set" {
t.Fatalf("got non-expected error: %v", err)
}
if sp != nil {
t.Fatal("expected nil for sp, but got none-nil value")
}
sp, err = CreateStreamParser(strings.NewReader(""), ".", "[invalid")
if err == nil || err.Error() != "invalid streamElementFilter '[invalid', err: expression must evaluate to a node-set" {
t.Fatalf("got non-expected error: %v", err)
}
if sp != nil {
t.Fatal("expected nil for sp, but got none-nil value")
}
} | explode_data.jsonl/18848 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 233
} | [
2830,
3393,
3027,
6570,
62,
7928,
76531,
1155,
353,
8840,
836,
8,
341,
41378,
11,
1848,
1669,
4230,
3027,
6570,
51442,
68587,
86076,
10545,
11808,
1138,
743,
1848,
621,
2092,
1369,
1848,
6141,
368,
961,
330,
11808,
4269,
1691,
76531,
18... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestNotILikeToSql(t *testing.T) {
b := NotILike{"name": "sq%"}
sql, args, err := b.ToSql()
assert.NoError(t, err)
expectedSql := "name NOT ILIKE ?"
assert.Equal(t, expectedSql, sql)
expectedArgs := []interface{}{"sq%"}
assert.Equal(t, expectedArgs, args)
} | explode_data.jsonl/44180 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 113
} | [
2830,
3393,
2623,
1715,
2970,
1249,
8269,
1155,
353,
8840,
836,
8,
341,
2233,
1669,
2806,
1715,
2970,
4913,
606,
788,
330,
28343,
15794,
532,
30633,
11,
2827,
11,
1848,
1669,
293,
3274,
8269,
741,
6948,
35699,
1155,
11,
1848,
692,
424... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestServerSoftReboot(t *testing.T) {
setup()
defer teardown()
var svr *server
mux.HandleFunc(testlib.CloudServerURL(svr.itemActionPath("6768e664-7e3e-11ea-ba40-ffdde7ae9a5b")), func(w http.ResponseWriter, r *http.Request) {
require.Equal(t, http.MethodPost, r.Method)
var sa *ServerAction
require.NoError(t, json.NewDecoder(r.Body).Decode(&sa))
assert.Equal(t, "soft_reboot", sa.Action)
resp := `
{
"message": "Soft reboot server th\u00e0nh c\u00f4ng"
}
`
_, _ = fmt.Fprint(w, resp)
})
response, err := client.Server.SoftReboot(ctx, "6768e664-7e3e-11ea-ba40-ffdde7ae9a5b")
require.NoError(t, err)
assert.Equal(t, "Soft reboot server th\u00e0nh c\u00f4ng", response.Message)
} | explode_data.jsonl/35473 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 318
} | [
2830,
3393,
5475,
30531,
693,
4619,
1155,
353,
8840,
836,
8,
341,
84571,
741,
16867,
49304,
741,
2405,
13559,
81,
353,
4030,
198,
2109,
2200,
63623,
8623,
2740,
94492,
5475,
3144,
1141,
18920,
8984,
2512,
1820,
445,
21,
22,
21,
23,
68... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestUUIDValidation(t *testing.T) {
tests := []struct {
param string
expected bool
}{
{"", false},
{"xxxa987fbc9-4bed-3078-cf07-9141ba07c9f3", false},
{"a987fbc9-4bed-3078-cf07-9141ba07c9f3xxx", false},
{"a987fbc94bed3078cf079141ba07c9f3", false},
{"934859", false},
{"987fbc9-4bed-3078-cf07a-9141ba07c9f3", false},
{"aaaaaaaa-1111-1111-aaag-111111111111", false},
{"a987fbc9-4bed-3078-cf07-9141ba07c9f3", true},
}
validate := New()
for i, test := range tests {
errs := validate.Var(test.param, "uuid")
if test.expected {
if !IsEqual(errs, nil) {
t.Fatalf("Index: %d UUID failed Error: %s", i, errs)
}
} else {
if IsEqual(errs, nil) {
t.Fatalf("Index: %d UUID failed Error: %s", i, errs)
} else {
val := getError(errs, "", "")
if val.Tag() != "uuid" {
t.Fatalf("Index: %d UUID failed Error: %s", i, errs)
}
}
}
}
} | explode_data.jsonl/77270 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 451
} | [
2830,
3393,
24754,
13799,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
36037,
262,
914,
198,
197,
42400,
1807,
198,
197,
59403,
197,
197,
4913,
497,
895,
1583,
197,
197,
4913,
4146,
9591,
24,
23,
22,
69,
8904,
24... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
func TestPackagesCmdFlags(t *testing.T) {
request := "docker-archive:" + getFixtureImage(t, "image-pkg-coverage")
tests := []struct {
name string
args []string
env map[string]string
assertions []traitAssertion
}{
{
name: "no-args-shows-help",
args: []string{"packages"},
assertions: []traitAssertion{
assertInOutput("an image/directory argument is required"), // specific error that should be shown
assertInOutput("Generate a packaged-based Software Bill Of Materials"), // excerpt from help description
assertFailingReturnCode,
},
},
{
name: "json-output-flag",
args: []string{"packages", "-o", "json", request},
assertions: []traitAssertion{
assertJsonReport,
assertSuccessfulReturnCode,
},
},
{
name: "output-env-binding",
env: map[string]string{
"SYFT_OUTPUT": "json",
},
args: []string{"packages", request},
assertions: []traitAssertion{
assertJsonReport,
assertSuccessfulReturnCode,
},
},
{
name: "table-output-flag",
args: []string{"packages", "-o", "table", request},
assertions: []traitAssertion{
assertTableReport,
assertSuccessfulReturnCode,
},
},
{
name: "default-output-flag",
args: []string{"packages", request},
assertions: []traitAssertion{
assertTableReport,
assertSuccessfulReturnCode,
},
},
{
name: "squashed-scope-flag",
args: []string{"packages", "-o", "json", "-s", "squashed", request},
assertions: []traitAssertion{
assertPackageCount(17),
assertSuccessfulReturnCode,
},
},
{
name: "all-layers-scope-flag",
args: []string{"packages", "-o", "json", "-s", "all-layers", request},
assertions: []traitAssertion{
assertPackageCount(19),
assertSuccessfulReturnCode,
},
},
{
name: "all-layers-scope-flag-by-env",
args: []string{"packages", "-o", "json", request},
env: map[string]string{
"SYFT_PACKAGE_CATALOGER_SCOPE": "all-layers",
},
assertions: []traitAssertion{
assertPackageCount(19),
assertSuccessfulReturnCode,
},
},
{
name: "attempt-upload-on-cli-switches",
args: []string{"packages", "-vv", "-H", "localhost:8080", "-u", "the-username", "-d", "test-fixtures/image-pkg-coverage/Dockerfile", "--overwrite-existing-image", request},
env: map[string]string{
"SYFT_ANCHORE_PATH": "path/to/api",
"SYFT_ANCHORE_PASSWORD": "the-password",
},
assertions: []traitAssertion{
// we cannot easily assert a successful upload behavior, so instead we are doing the next best thing
// and asserting that the parsed configuration has the expected values and we see log entries
// indicating an upload attempt.
assertNotInOutput("the-username"),
assertNotInOutput("the-password"),
assertInOutput("uploading results to localhost:8080"),
assertInOutput(`dockerfile: test-fixtures/image-pkg-coverage/Dockerfile`),
assertInOutput(`overwrite-existing-image: true`),
assertInOutput(`path: path/to/api`),
assertInOutput(`host: localhost:8080`),
assertFailingReturnCode, // upload can't go anywhere, so if this passes that would be surprising
},
},
{
name: "dockerfile-without-upload-is-invalid",
args: []string{"packages", "-vv", "-d", "test-fixtures/image-pkg-coverage/Dockerfile", request},
assertions: []traitAssertion{
assertNotInOutput("uploading results to localhost:8080"),
assertInOutput("invalid application config: cannot provide dockerfile option without enabling upload"),
assertFailingReturnCode,
},
},
{
name: "attempt-upload-with-env-host-set",
args: []string{"packages", "-vv", request},
env: map[string]string{
"SYFT_ANCHORE_HOST": "localhost:8080",
},
assertions: []traitAssertion{
assertInOutput("uploading results to localhost:8080"),
assertFailingReturnCode, // upload can't go anywhere, so if this passes that would be surprising
},
},
{
// we want to make certain that syft can catalog a single go binary and get a SBOM report that is not empty
name: "catalog-single-go-binary",
args: []string{"packages", "-o", "json", getSyftBinaryLocation(t)},
assertions: []traitAssertion{
assertJsonReport,
assertStdoutLengthGreaterThan(1000),
assertSuccessfulReturnCode,
},
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
cmd, stdout, stderr := runSyft(t, test.env, test.args...)
for _, traitFn := range test.assertions {
traitFn(t, stdout, stderr, cmd.ProcessState.ExitCode())
}
if t.Failed() {
t.Log("STDOUT:\n", stdout)
t.Log("STDERR:\n", stderr)
t.Log("COMMAND:", strings.Join(cmd.Args, " "))
}
})
}
} | explode_data.jsonl/17778 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1882
} | [
2830,
3393,
69513,
15613,
9195,
1155,
353,
8840,
836,
8,
341,
23555,
1669,
330,
28648,
95100,
2974,
488,
633,
18930,
1906,
1155,
11,
330,
1805,
2268,
7351,
12,
54250,
5130,
78216,
1669,
3056,
1235,
341,
197,
11609,
981,
914,
198,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestAttrsReplaceV2(t *testing.T) {
c := setupTest([]string{"update", "attrs", "--host", "orion", "--id", "urn:ngsi-ld:Product:010", "--data", "{\"specialOffer\":{\"value\": true}}"})
reqRes := helper.MockHTTPReqRes{}
reqRes.Res.StatusCode = http.StatusNoContent
reqRes.Path = "/v2/entities/urn:ngsi-ld:Product:010/attrs"
helper.SetClientHTTP(c, reqRes)
err := attrsReplace(c, c.Ngsi, c.Client)
assert.NoError(t, err)
} | explode_data.jsonl/33079 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 180
} | [
2830,
3393,
53671,
23107,
53,
17,
1155,
353,
8840,
836,
8,
341,
1444,
1669,
6505,
2271,
10556,
917,
4913,
2386,
497,
330,
20468,
497,
14482,
3790,
497,
330,
269,
290,
497,
14482,
307,
497,
330,
399,
25,
968,
6321,
12,
507,
25,
4816,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestPeers(t *testing.T) {
once.Do(testSetup)
peer0 := &fakePeer{}
peer1 := &fakePeer{}
peer2 := &fakePeer{}
peerList := fakePeers([]ProtoGetter{peer0, peer1, peer2, nil})
const cacheSize = 0 // disabled
localHits := 0
getter := func(_ context.Context, key string, dest Sink) error {
localHits++
return dest.SetString("got:"+key, time.Time{})
}
testGroup := newGroup("TestPeers-group", cacheSize, GetterFunc(getter), peerList)
run := func(name string, n int, wantSummary string) {
// Reset counters
localHits = 0
for _, p := range []*fakePeer{peer0, peer1, peer2} {
p.hits = 0
}
for i := 0; i < n; i++ {
key := fmt.Sprintf("key-%d", i)
want := "got:" + key
var got string
err := testGroup.Get(dummyCtx, key, StringSink(&got))
if err != nil {
t.Errorf("%s: error on key %q: %v", name, key, err)
continue
}
if got != want {
t.Errorf("%s: for key %q, got %q; want %q", name, key, got, want)
}
}
summary := func() string {
return fmt.Sprintf("localHits = %d, peers = %d %d %d", localHits, peer0.hits, peer1.hits, peer2.hits)
}
if got := summary(); got != wantSummary {
t.Errorf("%s: got %q; want %q", name, got, wantSummary)
}
}
resetCacheSize := func(maxBytes int64) {
g := testGroup
g.cacheBytes = maxBytes
g.mainCache = cache{}
g.hotCache = cache{}
}
// Base case; peers all up, with no problems.
resetCacheSize(1 << 20)
run("base", 200, "localHits = 49, peers = 51 49 51")
// Verify cache was hit. All localHits and peers are gone as the hotCache has
// the data we need
run("cached_base", 200, "localHits = 0, peers = 0 0 0")
resetCacheSize(0)
// With one of the peers being down.
// TODO(bradfitz): on a peer number being unavailable, the
// consistent hashing should maybe keep trying others to
// spread the load out. Currently it fails back to local
// execution if the first consistent-hash slot is unavailable.
peerList[0] = nil
run("one_peer_down", 200, "localHits = 100, peers = 0 49 51")
// Failing peer
peerList[0] = peer0
peer0.fail = true
run("peer0_failing", 200, "localHits = 100, peers = 51 49 51")
} | explode_data.jsonl/62846 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 842
} | [
2830,
3393,
10197,
388,
1155,
353,
8840,
836,
8,
341,
197,
13184,
33596,
8623,
21821,
340,
197,
16537,
15,
1669,
609,
30570,
30888,
16094,
197,
16537,
16,
1669,
609,
30570,
30888,
16094,
197,
16537,
17,
1669,
609,
30570,
30888,
16094,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestContextWithoutDeadline_cancel(t *testing.T) {
ctxWithDeadline, cancelWithDeadline := context.WithTimeout(context.Background(), time.Minute)
defer cancelWithDeadline()
ctxNoDeadline, cancelNoDeadline := contextWithoutDeadline(ctxWithDeadline)
cancelNoDeadline()
select {
case <-ctxNoDeadline.Done():
case <-time.After(10 * time.Second):
t.Fatal("expected context to be done")
}
} | explode_data.jsonl/52670 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 130
} | [
2830,
3393,
1972,
26040,
83593,
28895,
1155,
353,
8840,
836,
8,
341,
20985,
2354,
83593,
11,
9121,
2354,
83593,
1669,
2266,
26124,
7636,
5378,
19047,
1507,
882,
75770,
340,
16867,
9121,
2354,
83593,
741,
20985,
2753,
83593,
11,
9121,
2753... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func Test_procSubscribePush_nilParacheck(t *testing.T) {
chain, mock33 := createBlockChain(t)
defer mock33.Close()
err := chain.procSubscribePush(nil)
assert.Equal(t, err, types.ErrInvalidParam)
} | explode_data.jsonl/61711 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 73
} | [
2830,
3393,
24436,
28573,
16644,
36175,
4272,
1777,
377,
1155,
353,
8840,
836,
8,
341,
197,
8819,
11,
7860,
18,
18,
1669,
1855,
4713,
18837,
1155,
340,
16867,
7860,
18,
18,
10421,
741,
9859,
1669,
8781,
83430,
28573,
16644,
27907,
340,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestWSUsersAuth(t *testing.T) {
users := []*User{{Username: "user", Password: "pwd"}}
for _, test := range []struct {
name string
opts func() *Options
user string
pass string
err string
}{
{
"no filtering, wrong user",
func() *Options {
o := testWSOptions()
o.Users = users
return o
},
"wronguser", "pwd", "-ERR 'Authorization Violation'",
},
{
"no filtering, correct user",
func() *Options {
o := testWSOptions()
o.Users = users
return o
},
"user", "pwd", "",
},
{
"filering, user not allowed",
func() *Options {
o := testWSOptions()
o.Users = users
// Only allowed for regular clients
o.Users[0].AllowedConnectionTypes = testCreateAllowedConnectionTypes([]string{jwt.ConnectionTypeStandard})
return o
},
"user", "pwd", "-ERR 'Authorization Violation'",
},
{
"filtering, user allowed",
func() *Options {
o := testWSOptions()
o.Users = users
o.Users[0].AllowedConnectionTypes = testCreateAllowedConnectionTypes([]string{jwt.ConnectionTypeStandard, jwt.ConnectionTypeWebsocket})
return o
},
"user", "pwd", "",
},
{
"filtering, wrong password",
func() *Options {
o := testWSOptions()
o.Users = users
o.Users[0].AllowedConnectionTypes = testCreateAllowedConnectionTypes([]string{jwt.ConnectionTypeStandard, jwt.ConnectionTypeWebsocket})
return o
},
"user", "badpassword", "-ERR 'Authorization Violation'",
},
} {
t.Run(test.name, func(t *testing.T) {
o := test.opts()
s := RunServer(o)
defer s.Shutdown()
wsc, br, _ := testWSCreateClientGetInfo(t, false, false, o.Websocket.Host, o.Websocket.Port)
defer wsc.Close()
connectProto := fmt.Sprintf("CONNECT {\"verbose\":false,\"protocol\":1,\"user\":\"%s\",\"pass\":\"%s\"}\r\nPING\r\n",
test.user, test.pass)
wsmsg := testWSCreateClientMsg(wsBinaryMessage, 1, true, false, []byte(connectProto))
if _, err := wsc.Write(wsmsg); err != nil {
t.Fatalf("Error sending message: %v", err)
}
msg := testWSReadFrame(t, br)
if test.err == "" && !bytes.HasPrefix(msg, []byte("PONG\r\n")) {
t.Fatalf("Expected to receive PONG, got %q", msg)
} else if test.err != "" && !bytes.HasPrefix(msg, []byte(test.err)) {
t.Fatalf("Expected to receive %q, got %q", test.err, msg)
}
})
}
} | explode_data.jsonl/42731 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1001
} | [
2830,
3393,
7433,
7137,
5087,
1155,
353,
8840,
836,
8,
341,
90896,
1669,
29838,
1474,
2979,
11115,
25,
330,
872,
497,
12362,
25,
330,
25565,
95642,
2023,
8358,
1273,
1669,
2088,
3056,
1235,
341,
197,
11609,
914,
198,
197,
64734,
2915,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestListProjectPaths(t *testing.T) {
tmpDir, cleanup, err := dirs.TempDir("", "")
require.NoError(t, err)
defer cleanup()
origWd, err := os.Getwd()
require.NoError(t, err)
for i, tc := range []struct {
name string
files []gofiles.GoFileSpec
wd string
include matcher.Matcher
exclude matcher.Matcher
want []string
}{
{
"empty matcher matches nothing",
[]gofiles.GoFileSpec{
{
RelPath: "foo.go",
},
},
".",
nil,
nil,
nil,
},
{
"matcher matches files and directories",
[]gofiles.GoFileSpec{
{
RelPath: "foo.go",
},
{
RelPath: "bar/bar.go",
},
},
".",
matcher.Name(`.+`),
nil,
[]string{
"bar",
"bar/bar.go",
"foo.go",
},
},
{
"matcher returns relative paths",
[]gofiles.GoFileSpec{
{
RelPath: "foo.go",
},
{
RelPath: "bar/bar.go",
},
},
"bar",
matcher.Name(`.+`),
nil,
[]string{
"../bar",
"../bar/bar.go",
"../foo.go",
},
},
{
"exclude matcher is used",
[]gofiles.GoFileSpec{
{
RelPath: "foo.go",
},
{
RelPath: "bar/bar.go",
},
},
"bar",
matcher.Name(`.+`),
matcher.Name(`bar.go`),
[]string{
"../bar",
"../foo.go",
},
},
} {
projectDir, err := ioutil.TempDir(tmpDir, "project")
require.NoError(t, err)
projectDir, err = filepath.EvalSymlinks(projectDir)
require.NoError(t, err)
_, err = gofiles.Write(projectDir, tc.files)
require.NoError(t, err)
func() {
err = os.Chdir(path.Join(projectDir, tc.wd))
require.NoError(t, err)
defer func() {
err = os.Chdir(origWd)
require.NoError(t, err)
}()
got, err := godellauncher.ListProjectPaths(projectDir, tc.include, tc.exclude)
require.NoError(t, err)
assert.Equal(t, tc.want, got, "Case %d: %s", i, tc.name)
}()
}
} | explode_data.jsonl/49965 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 983
} | [
2830,
3393,
852,
7849,
26901,
1155,
353,
8840,
836,
8,
341,
20082,
6184,
11,
21290,
11,
1848,
1669,
42248,
65009,
6184,
19814,
14676,
17957,
35699,
1155,
11,
1848,
340,
16867,
21290,
2822,
197,
4670,
54,
67,
11,
1848,
1669,
2643,
2234,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestParseRules(t *testing.T) {
test := []string{`{P}|1|[2]^"a"`,
`{P}|1|[:2]^"a"`,
`{P}|0|[%2]#({W}|0|[:2]^"br")`,
`{P}|0|[2]#({W}|0|[2:]$"brazil")`,
`{B}|1|[@2]^"a"`,
`{P}|1|[:2]9"a"`}
want := []bool{true, true, true, true, false, false}
for idx, item := range test {
result, _ := ParseRules(item)
if result != want[idx] {
t.Errorf("Fail to valid rule (%s), expected (%t), received (%t)", item, want[idx], result)
}
}
} | explode_data.jsonl/73978 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 249
} | [
2830,
3393,
14463,
26008,
1155,
353,
8840,
836,
8,
341,
18185,
1669,
3056,
917,
90,
63,
90,
47,
52398,
16,
74723,
17,
90304,
1,
64,
1,
12892,
197,
197,
63,
90,
47,
52398,
16,
91,
3447,
17,
90304,
1,
64,
1,
12892,
197,
197,
63,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestRCAsync(t *testing.T) {
tests := []testRun{{
Name: "ok",
URL: "rc/noop",
Method: "POST",
ContentType: "application/json",
Body: `{ "_async":true }`,
Status: http.StatusOK,
Contains: regexp.MustCompile(`(?s)\{.*\"jobid\":.*\}`),
}, {
Name: "bad",
URL: "rc/noop",
Method: "POST",
ContentType: "application/json",
Body: `{ "_async":"truthy" }`,
Status: http.StatusBadRequest,
Expected: `{
"error": "couldn't parse key \"_async\" (truthy) as bool: strconv.ParseBool: parsing \"truthy\": invalid syntax",
"input": {
"_async": "truthy"
},
"path": "rc/noop",
"status": 400
}
`,
}}
opt := newTestOpt()
opt.Serve = true
opt.Files = ""
testServer(t, tests, &opt)
} | explode_data.jsonl/12972 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 376
} | [
2830,
3393,
7380,
6525,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1944,
6727,
90,
515,
197,
21297,
25,
286,
330,
562,
756,
197,
79055,
25,
260,
330,
1287,
33100,
453,
756,
197,
84589,
25,
414,
330,
2946,
756,
197,
197,
29504,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func Test_Workspaces_Spec_WhenSerializedToJson_DeserializesAsEqual(t *testing.T) {
t.Parallel()
parameters := gopter.DefaultTestParameters()
parameters.MaxSize = 10
properties := gopter.NewProperties(parameters)
properties.Property(
"Round trip of Workspaces_Spec via JSON returns original",
prop.ForAll(RunJSONSerializationTestForWorkspacesSpec, WorkspacesSpecGenerator()))
properties.TestingRun(t, gopter.NewFormatedReporter(true, 240, os.Stdout))
} | explode_data.jsonl/43363 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 148
} | [
2830,
3393,
87471,
44285,
1098,
992,
62,
4498,
77521,
78967,
98054,
2848,
4756,
2121,
2993,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
67543,
1669,
728,
73137,
13275,
2271,
9706,
741,
67543,
14535,
1695,
284,
220,
16,
15,
198... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestValidateAuthInfoExecNoEnv(t *testing.T) {
config := clientcmdapi.NewConfig()
config.AuthInfos["user"] = &clientcmdapi.AuthInfo{
Exec: &clientcmdapi.ExecConfig{
Command: "/bin/example",
APIVersion: "clientauthentication.k8s.io/v1alpha1",
Env: []clientcmdapi.ExecEnvVar{
{Name: "foo", Value: ""},
},
},
}
test := configValidationTest{
config: config,
}
test.testAuthInfo("user", t)
test.testConfig(t)
} | explode_data.jsonl/69957 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 185
} | [
2830,
3393,
17926,
5087,
1731,
10216,
2753,
14359,
1155,
353,
8840,
836,
8,
341,
25873,
1669,
2943,
8710,
2068,
7121,
2648,
741,
25873,
25233,
38059,
1183,
872,
1341,
284,
609,
2972,
8710,
2068,
25233,
1731,
515,
197,
197,
10216,
25,
60... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestBuffer_AddCallsMetricRejectWhenNotInBatch(t *testing.T) {
var reject int
mm := &MockMetric{
Metric: Metric(),
RejectF: func() {
reject++
},
}
b := setup(NewBuffer("test", 5))
setup(b)
b.Add(mm, mm, mm, mm, mm)
batch := b.Batch(2)
b.Add(mm, mm, mm, mm)
require.Equal(t, 2, reject)
b.Reject(batch)
require.Equal(t, 4, reject)
} | explode_data.jsonl/17695 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 169
} | [
2830,
3393,
4095,
21346,
55292,
54310,
78413,
4498,
2623,
641,
21074,
1155,
353,
8840,
836,
8,
341,
2405,
7850,
526,
198,
2109,
76,
1669,
609,
11571,
54310,
515,
197,
9209,
16340,
25,
52458,
3148,
197,
197,
78413,
37,
25,
2915,
368,
3... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestRoundTripMachineSet(t *testing.T) {
codecs := serializer.NewCodecFactory(scheme.Scheme)
seed := time.Now().UnixNano()
machineFuzzer := fuzzer.FuzzerFor(fuzzer.MergeFuzzerFuncs(metafuzzer.Funcs, machineFuzzerFuncs), rand.NewSource(seed), codecs)
ctx := context.Background()
g := NewWithT(t)
for i := 0; i < 100; i++ {
machineSet := &MachineSet{
ObjectMeta: metav1.ObjectMeta{
GenerateName: "machineset-round-trip-test-",
Namespace: "default",
},
}
// Fuzz the spec and status as those are the ones we need to check aren't
// losing data
spec := &MachineSetSpec{}
status := &MachineSetStatus{}
machineFuzzer.Fuzz(spec)
machineFuzzer.Fuzz(status)
machineSet.Spec = *spec.DeepCopy()
g.Expect(c.Create(ctx, machineSet)).To(Succeed())
machineSet.Status = *status.DeepCopy()
g.Expect(c.Status().Update(ctx, machineSet)).To(Succeed())
// Check the spec and status weren't modified during create
//
// Use JSON representation as order of fields in RawExtensions may change
// during a round trip
machineSetSpecJSON, err := json.Marshal(machineSet.Spec)
g.Expect(err).ToNot(HaveOccurred())
specJSON, err := json.Marshal(*spec)
g.Expect(err).ToNot(HaveOccurred())
g.Expect(machineSetSpecJSON).To(MatchJSON(specJSON))
machineSetStatusJSON, err := json.Marshal(machineSet.Status)
g.Expect(err).ToNot(HaveOccurred())
statusJSON, err := json.Marshal(*status)
g.Expect(err).ToNot(HaveOccurred())
g.Expect(machineSetStatusJSON).To(MatchJSON(statusJSON))
fetched := &MachineSet{}
key := client.ObjectKey{Namespace: machineSet.Namespace, Name: machineSet.Name}
g.Expect(c.Get(ctx, key, fetched)).To(Succeed())
// Check the spec and status haven't changed server side
g.Expect(fetched.Spec).To(Equal(machineSet.Spec))
g.Expect(fetched.Status).To(Equal(machineSet.Status))
}
} | explode_data.jsonl/69318 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 698
} | [
2830,
3393,
27497,
56352,
21605,
1649,
1155,
353,
8840,
836,
8,
341,
43343,
4837,
1669,
21759,
7121,
36913,
4153,
1141,
8058,
92719,
340,
197,
22602,
1669,
882,
13244,
1005,
55832,
83819,
741,
2109,
3814,
37,
91447,
1669,
282,
91447,
991,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestPath(t *testing.T) {
jsonStr := basicJSON
r := Get(jsonStr, "@this")
path := r.Path(jsonStr)
if path != "@this" {
t.FailNow()
}
r = Parse(jsonStr)
path = r.Path(jsonStr)
if path != "@this" {
t.FailNow()
}
obj := Parse(jsonStr)
obj.ForEach(func(key, val Result) bool {
kp := key.Path(jsonStr)
assert(t, kp == "")
vp := val.Path(jsonStr)
if vp == "name" {
// there are two "name" keys
return true
}
val2 := obj.Get(vp)
assert(t, val2.Raw == val.Raw)
return true
})
arr := obj.Get("loggy.programmers")
arr.ForEach(func(_, val Result) bool {
vp := val.Path(jsonStr)
val2 := Get(jsonStr, vp)
assert(t, val2.Raw == val.Raw)
return true
})
get := func(path string) {
r1 := Get(jsonStr, path)
path2 := r1.Path(jsonStr)
r2 := Get(jsonStr, path2)
assert(t, r1.Raw == r2.Raw)
}
get("age")
get("name")
get("name.here")
get("noop")
get("noop.what is a wren?")
get("arr.0")
get("arr.1")
get("arr.2")
get("arr.3")
get("arr.3.hello")
get("arr.4")
get("arr.5")
get("loggy.programmers.2.email")
get("lastly.end\\.\\.\\.ing")
get("lastly.yay")
} | explode_data.jsonl/43415 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 526
} | [
2830,
3393,
1820,
1155,
353,
8840,
836,
8,
341,
30847,
2580,
1669,
6770,
5370,
198,
7000,
1669,
2126,
9304,
2580,
11,
8428,
574,
1138,
26781,
1669,
435,
17474,
9304,
2580,
340,
743,
1815,
961,
8428,
574,
1,
341,
197,
3244,
57243,
7039... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestRegister3(t *testing.T) {
testRegisterWithFrontEnd(func() lib.Reactor { return NewFrontEnd3() }, 1000.0, t,
10,
[]lib.Fault{})
} | explode_data.jsonl/79905 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 57
} | [
2830,
3393,
8690,
18,
1155,
353,
8840,
836,
8,
341,
18185,
8690,
2354,
23395,
3727,
18552,
368,
3051,
2817,
5621,
314,
470,
1532,
23395,
3727,
18,
368,
2470,
220,
16,
15,
15,
15,
13,
15,
11,
259,
345,
197,
197,
16,
15,
345,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestCreateFirewallRule(t *testing.T) {
handler := APIMock{
T: t,
ExpectMethod: "POST",
ExpectURL: "/1.0/firewall_rules",
ExpectBody: map[string]string{
"firewall_policy": "fwp-j3654",
"protocol": "tcp",
"source": "grp-xxxxx",
"destination": ""},
GiveBody: readJSON("firewall_rule"),
}
ts := httptest.NewServer(&handler)
defer ts.Close()
client, err := brightbox.NewClient(ts.URL, "", nil)
if err != nil {
t.Fatal(err)
}
pol := "fwp-j3654"
proto := "tcp"
dst := ""
src := "grp-xxxxx"
opts := brightbox.FirewallRuleOptions{
FirewallPolicy: pol,
Protocol: &proto,
Source: &src,
Destination: &dst,
}
p, err := client.CreateFirewallRule(&opts)
if err != nil {
t.Fatal(err)
}
if p == nil {
t.Errorf("Didn't return a firewall rule")
}
if p.Id != "fwr-k32ls" {
t.Errorf("firewall rule id is %s", p.Id)
}
} | explode_data.jsonl/17594 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 447
} | [
2830,
3393,
4021,
16697,
16431,
11337,
1155,
353,
8840,
836,
8,
341,
53326,
1669,
10106,
1791,
1176,
515,
197,
10261,
25,
310,
259,
345,
197,
35911,
3523,
25,
330,
2946,
756,
197,
35911,
3144,
25,
262,
3521,
16,
13,
15,
53010,
16431,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestReplacingGlobalCommandHook(t *testing.T) {
t.Parallel()
tester, err := NewBootstrapTester()
if err != nil {
t.Fatal(err)
}
defer tester.Close()
tester.ExpectGlobalHook("command").Once().AndExitWith(0)
tester.ExpectGlobalHook("environment").Once()
tester.ExpectGlobalHook("pre-checkout").Once()
tester.ExpectGlobalHook("post-checkout").Once()
tester.ExpectLocalHook("post-checkout").Once()
tester.ExpectGlobalHook("pre-command").Once()
tester.ExpectLocalHook("pre-command").Once()
tester.ExpectGlobalHook("post-command").Once()
tester.ExpectLocalHook("post-command").Once()
tester.ExpectGlobalHook("pre-exit").Once()
tester.ExpectLocalHook("pre-exit").Once()
tester.RunAndCheck(t)
} | explode_data.jsonl/8971 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 270
} | [
2830,
3393,
81160,
11646,
4062,
31679,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
18185,
261,
11,
1848,
1669,
1532,
45511,
58699,
741,
743,
1848,
961,
2092,
341,
197,
3244,
26133,
3964,
340,
197,
532,
16867,
37111,
10421,
28... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestLightningNetworkDaemon(t *testing.T) {
ht := newHarnessTest(t)
var lndHarness *lntest.NetworkHarness
// First create an instance of the btcd's rpctest.Harness. This will be
// used to fund the wallets of the nodes within the test network and to
// drive blockchain related events within the network. Revert the default
// setting of accepting non-standard transactions on simnet to reject them.
// Transactions on the lightning network should always be standard to get
// better guarantees of getting included in to blocks.
args := []string{"--rejectnonstd"}
handlers := &rpcclient.NotificationHandlers{
OnTxAccepted: func(hash *chainhash.Hash, amt btcutil.Amount) {
lndHarness.OnTxAccepted(hash)
},
}
btcdHarness, err := rpctest.New(harnessNetParams, handlers, args)
if err != nil {
ht.Fatalf("unable to create mining node: %v", err)
}
defer btcdHarness.TearDown()
// First create the network harness to gain access to its
// 'OnTxAccepted' call back.
lndHarness, err = lntest.NewNetworkHarness(btcdHarness)
if err != nil {
ht.Fatalf("unable to create lightning network harness: %v", err)
}
defer lndHarness.TearDownAll()
// Spawn a new goroutine to watch for any fatal errors that any of the
// running lnd processes encounter. If an error occurs, then the test
// case should naturally as a result and we log the server error here to
// help debug.
go func() {
for {
select {
case err, more := <-lndHarness.ProcessErrors():
if !more {
return
}
ht.Logf("lnd finished with error (stderr):\n%v", err)
}
}
}()
// Turn off the btcd rpc logging, otherwise it will lead to panic.
// TODO(andrew.shvv|roasbeef) Remove the hack after re-work the way the log
// rotator os work.
rpcclient.UseLogger(btclog.Disabled)
if err := btcdHarness.SetUp(true, 50); err != nil {
ht.Fatalf("unable to set up mining node: %v", err)
}
if err := btcdHarness.Node.NotifyNewTransactions(false); err != nil {
ht.Fatalf("unable to request transaction notifications: %v", err)
}
// Next mine enough blocks in order for segwit and the CSV package
// soft-fork to activate on SimNet.
numBlocks := chaincfg.SimNetParams.MinerConfirmationWindow * 2
if _, err := btcdHarness.Node.Generate(numBlocks); err != nil {
ht.Fatalf("unable to generate blocks: %v", err)
}
// With the btcd harness created, we can now complete the
// initialization of the network. args - list of lnd arguments,
// example: "--debuglevel=debug"
// TODO(roasbeef): create master balanced channel with all the monies?
if err = lndHarness.SetUp(nil); err != nil {
ht.Fatalf("unable to set up test lightning network: %v", err)
}
t.Logf("Running %v integration tests", len(testsCases))
for _, testCase := range testsCases {
logLine := fmt.Sprintf("STARTING ============ %v ============\n",
testCase.name)
err := lndHarness.EnsureConnected(
context.Background(), lndHarness.Alice, lndHarness.Bob,
)
if err != nil {
t.Fatalf("unable to connect alice to bob: %v", err)
}
if err := lndHarness.Alice.AddToLog(logLine); err != nil {
t.Fatalf("unable to add to log: %v", err)
}
if err := lndHarness.Bob.AddToLog(logLine); err != nil {
t.Fatalf("unable to add to log: %v", err)
}
success := t.Run(testCase.name, func(t1 *testing.T) {
ht := newHarnessTest(t1)
ht.RunTestCase(testCase, lndHarness)
})
// Stop at the first failure. Mimic behavior of original test
// framework.
if !success {
break
}
}
} | explode_data.jsonl/2030 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1256
} | [
2830,
3393,
13911,
1229,
12320,
89177,
1155,
353,
8840,
836,
8,
341,
197,
426,
1669,
501,
74248,
2271,
1155,
692,
2405,
326,
303,
74248,
353,
75,
406,
477,
30149,
74248,
271,
197,
322,
5512,
1855,
458,
2867,
315,
279,
19592,
4385,
594... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestLogAsJSONEnabled(t *testing.T) {
t.Run("dapr.io/log-as-json is true", func(t *testing.T) {
var fakeAnnotation = map[string]string{
daprLogAsJSON: "true",
}
assert.Equal(t, true, logAsJSONEnabled(fakeAnnotation))
})
t.Run("dapr.io/log-as-json is false", func(t *testing.T) {
var fakeAnnotation = map[string]string{
daprLogAsJSON: "false",
}
assert.Equal(t, false, logAsJSONEnabled(fakeAnnotation))
})
t.Run("dapr.io/log-as-json is not given", func(t *testing.T) {
var fakeAnnotation = map[string]string{}
assert.Equal(t, false, logAsJSONEnabled(fakeAnnotation))
})
} | explode_data.jsonl/25736 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 255
} | [
2830,
3393,
2201,
2121,
5370,
5462,
1155,
353,
8840,
836,
8,
341,
3244,
16708,
445,
67,
59817,
4245,
19413,
32434,
56080,
374,
830,
497,
2915,
1155,
353,
8840,
836,
8,
341,
197,
2405,
12418,
19711,
284,
2415,
14032,
30953,
515,
298,
2... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestReplaceEdge(t *testing.T) {
var ctx context.Context
c := createClientFromEnv(t, true)
db := ensureDatabase(ctx, c, "edge_test", nil, t)
prefix := "replace_edge_"
g := ensureGraph(ctx, db, prefix+"graph", nil, t)
ec := ensureEdgeCollection(ctx, g, prefix+"citiesPerState", []string{prefix + "city"}, []string{prefix + "state"}, t)
cities := ensureCollection(ctx, db, prefix+"city", nil, t)
states := ensureCollection(ctx, db, prefix+"state", nil, t)
from := createDocument(ctx, cities, map[string]interface{}{"name": "Venlo"}, t)
to := createDocument(ctx, states, map[string]interface{}{"name": "Limburg"}, t)
doc := RouteEdge{
From: from.ID.String(),
To: to.ID.String(),
Distance: 123,
}
meta, err := ec.CreateDocument(ctx, doc)
if err != nil {
t.Fatalf("Failed to create new document: %s", describe(err))
}
// Replacement doc
replacement := RouteEdge{
From: to.ID.String(),
To: from.ID.String(),
Distance: 567,
}
if _, err := ec.ReplaceDocument(ctx, meta.Key, replacement); err != nil {
t.Fatalf("Failed to replace document '%s': %s", meta.Key, describe(err))
}
// Read replaces document
var readDoc RouteEdge
if _, err := ec.ReadDocument(ctx, meta.Key, &readDoc); err != nil {
t.Fatalf("Failed to read document '%s': %s", meta.Key, describe(err))
}
if !reflect.DeepEqual(replacement, readDoc) {
t.Errorf("Got wrong document. Expected %+v, got %+v", replacement, readDoc)
}
} | explode_data.jsonl/26401 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 546
} | [
2830,
3393,
23107,
11656,
1155,
353,
8840,
836,
8,
341,
2405,
5635,
2266,
9328,
198,
1444,
1669,
1855,
2959,
3830,
14359,
1155,
11,
830,
340,
20939,
1669,
5978,
5988,
7502,
11,
272,
11,
330,
7186,
4452,
497,
2092,
11,
259,
340,
3223,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestNUp(t *testing.T) {
outDir := "../../samples/nup"
for _, tt := range []struct {
msg string
inFiles []string
outFile string
selectedPages []string
desc string
n int
isImg bool
}{
// 4-Up a PDF
{"TestNUpFromPDF",
[]string{filepath.Join(inDir, "WaldenFull.pdf")},
filepath.Join(outDir, "NUpFromPDF.pdf"),
nil,
"",
9,
false},
// 2-Up a PDF with CropBox
{"TestNUpFromPdfWithCropBox",
[]string{filepath.Join(inDir, "grid_example.pdf")},
filepath.Join(outDir, "NUpFromPDFWithCropBox.pdf"),
nil,
"f:A5L, b:on, m:0",
2,
false},
// 9-Up an image
{"TestNUpFromSingleImage",
[]string{filepath.Join(resDir, "logoSmall.png")},
filepath.Join(outDir, "NUpFromSingleImage.pdf"),
nil,
"f:A3P",
16,
true},
// 6-Up a sequence of images.
{"TestNUpFromImages",
imageFileNames(t, "../../../resources"),
filepath.Join(outDir, "NUpFromImages.pdf"),
nil,
"f:Tabloid, b:on, m:0",
6,
true},
} {
testNUp(t, tt.msg, tt.inFiles, tt.outFile, tt.selectedPages, tt.desc, tt.n, tt.isImg)
}
} | explode_data.jsonl/4584 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 579
} | [
2830,
3393,
45,
2324,
1155,
353,
8840,
836,
8,
341,
13967,
6184,
1669,
10208,
41118,
9612,
454,
1837,
2023,
8358,
17853,
1669,
2088,
3056,
1235,
341,
197,
21169,
1843,
914,
198,
197,
17430,
10809,
981,
3056,
917,
198,
197,
13967,
1703,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestNoAnnotationForEmptyDeployment(t *testing.T) {
t.Parallel()
deploymentSpec := apps_v1.Deployment{
TypeMeta: meta_v1.TypeMeta{
Kind: "Deployment",
APIVersion: apps_v1.SchemeGroupVersion.String(),
},
ObjectMeta: meta_v1.ObjectMeta{
Namespace: testNs,
},
Spec: apps_v1.DeploymentSpec{},
}
var one int32 = 1
expectedDeploymentSpec := apps_v1.Deployment{
TypeMeta: meta_v1.TypeMeta{
Kind: "Deployment",
APIVersion: apps_v1.SchemeGroupVersion.String(),
},
ObjectMeta: meta_v1.ObjectMeta{
Namespace: testNs,
Annotations: map[string]string{
LastAppliedReplicasAnnotation: "1",
},
},
Spec: apps_v1.DeploymentSpec{
Replicas: &one,
Template: core_v1.PodTemplateSpec{
ObjectMeta: meta_v1.ObjectMeta{
Annotations: map[string]string{
EnvRefHashAnnotation: nullSha256,
},
},
},
},
}
spec := runtimeToUnstructured(t, &deploymentSpec)
expectedSpec := runtimeToUnstructured(t, &expectedDeploymentSpec)
store := speccheckertesting.FakeStore{Namespace: testNs}
logger := zaptest.NewLogger(t)
defer logger.Sync() // nolint: errcheck
updatedSpec, err := deployment{}.BeforeCreate(&specchecker.Context{Logger: logger, Store: store}, spec)
require.NoError(t, err)
updatedSpecUnstr := runtimeToUnstructured(t, updatedSpec)
if !assert.True(t, equality.Semantic.DeepEqual(expectedSpec.Object, updatedSpecUnstr.Object)) {
t.Log(diff.ObjectReflectDiff(expectedSpec.Object, updatedSpecUnstr.Object))
}
} | explode_data.jsonl/78684 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 602
} | [
2830,
3393,
2753,
19711,
2461,
3522,
75286,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
197,
82213,
8327,
1669,
10500,
2273,
16,
34848,
39130,
515,
197,
27725,
12175,
25,
8823,
2273,
16,
10184,
12175,
515,
298,
197,
10629,
25... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestCertificateRequestOperationError(t *testing.T) {
domainName := "example.com"
aliases := []string{}
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
mockClient := client.NewMockClient(mockCtrl)
mockOutput := &mock.Output{}
operation := certificateRequestOperation{
acm: mockClient,
aliases: aliases,
domainName: domainName,
output: mockOutput,
}
mockClient.EXPECT().RequestCertificate(domainName, aliases).Return("", fmt.Errorf("oops, something went wrong"))
operation.execute()
if !mockOutput.Exited {
t.Errorf("Expected premature exit; didn't")
}
if len(mockOutput.FatalMsgs) == 0 {
t.Errorf("Expected error output from operation, got none")
}
} | explode_data.jsonl/29937 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 255
} | [
2830,
3393,
33202,
1900,
8432,
1454,
1155,
353,
8840,
836,
8,
341,
2698,
3121,
675,
1669,
330,
8687,
905,
698,
197,
33924,
1669,
3056,
917,
31483,
77333,
15001,
1669,
342,
316,
1176,
7121,
2051,
1155,
340,
16867,
7860,
15001,
991,
18176... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestGetHistoricCandles(t *testing.T) {
t.Parallel()
currencyPair, err := currency.NewPairFromString("BTC/USD")
if err != nil {
t.Fatal(err)
}
start := time.Date(2019, 11, 12, 0, 0, 0, 0, time.UTC)
end := start.AddDate(0, 0, 2)
_, err = f.GetHistoricCandles(context.Background(),
currencyPair, asset.Spot, start, end, kline.OneDay)
if err != nil {
t.Fatal(err)
}
} | explode_data.jsonl/15232 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 164
} | [
2830,
3393,
1949,
48983,
292,
34,
20125,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
1444,
5088,
12443,
11,
1848,
1669,
11413,
7121,
12443,
44491,
445,
59118,
14,
26749,
1138,
743,
1848,
961,
2092,
341,
197,
3244,
26133,
3964,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestTagsMarshal(t *testing.T) {
c := NewComment()
c.Tags.Add("wow")
c.Tags.Add("such")
got, err := json.Marshal(c)
if err != nil {
t.Error(err)
}
expected1 := []byte(`{"tags":["wow","such"]}`)
expected2 := []byte(`{"tags":["such","wow"]}`)
if !reflect.DeepEqual(got, expected1) && !reflect.DeepEqual(got, expected2) {
t.Errorf("Expected marshaling the comment to return %s or %s but got: %s!", expected1, expected2, got)
}
} | explode_data.jsonl/58853 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 180
} | [
2830,
3393,
15930,
55438,
1155,
353,
8840,
836,
8,
341,
1444,
1669,
1532,
10677,
741,
1444,
73522,
1904,
445,
57454,
1138,
1444,
73522,
1904,
445,
20805,
1138,
3174,
354,
11,
1848,
1669,
2951,
37271,
1337,
340,
743,
1848,
961,
2092,
341... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestMocker_DeleteWebhook(t *testing.T) {
m, s := NewSession(t)
var id discord.WebhookID = 123
m.DeleteWebhook(id)
err := s.DeleteWebhook(id)
require.NoError(t, err)
} | explode_data.jsonl/49381 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 73
} | [
2830,
3393,
11571,
261,
57418,
5981,
20873,
1155,
353,
8840,
836,
8,
341,
2109,
11,
274,
1669,
1532,
5283,
1155,
692,
2405,
877,
31041,
6473,
20873,
915,
284,
220,
16,
17,
18,
198,
2109,
18872,
5981,
20873,
3724,
692,
9859,
1669,
274,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestImageTag(t *testing.T) {
expectedURL := "/images/image_id/tag"
tagCases := []struct {
reference string
expectedQueryParams map[string]string
}{
{
reference: "repository:tag1",
expectedQueryParams: map[string]string{
"repo": "repository",
"tag": "tag1",
},
}, {
reference: "another_repository:latest",
expectedQueryParams: map[string]string{
"repo": "another_repository",
"tag": "latest",
},
}, {
reference: "another_repository",
expectedQueryParams: map[string]string{
"repo": "another_repository",
"tag": "latest",
},
}, {
reference: "test/another_repository",
expectedQueryParams: map[string]string{
"repo": "test/another_repository",
"tag": "latest",
},
}, {
reference: "test/another_repository:tag1",
expectedQueryParams: map[string]string{
"repo": "test/another_repository",
"tag": "tag1",
},
}, {
reference: "test/test/another_repository:tag1",
expectedQueryParams: map[string]string{
"repo": "test/test/another_repository",
"tag": "tag1",
},
}, {
reference: "test:5000/test/another_repository:tag1",
expectedQueryParams: map[string]string{
"repo": "test:5000/test/another_repository",
"tag": "tag1",
},
}, {
reference: "test:5000/test/another_repository",
expectedQueryParams: map[string]string{
"repo": "test:5000/test/another_repository",
"tag": "latest",
},
},
}
for _, tagCase := range tagCases {
client := &Client{
client: newMockClient(func(req *http.Request) (*http.Response, error) {
if !strings.HasPrefix(req.URL.Path, expectedURL) {
return nil, fmt.Errorf("expected URL '%s', got '%s'", expectedURL, req.URL)
}
if req.Method != "POST" {
return nil, fmt.Errorf("expected POST method, got %s", req.Method)
}
query := req.URL.Query()
for key, expected := range tagCase.expectedQueryParams {
actual := query.Get(key)
if actual != expected {
return nil, fmt.Errorf("%s not set in URL query properly. Expected '%s', got %s", key, expected, actual)
}
}
return &http.Response{
StatusCode: http.StatusOK,
Body: ioutil.NopCloser(bytes.NewReader([]byte(""))),
}, nil
}),
}
err := client.ImageTag(context.Background(), "image_id", tagCase.reference)
if err != nil {
t.Fatal(err)
}
}
} | explode_data.jsonl/22953 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1046
} | [
2830,
3393,
1906,
5668,
1155,
353,
8840,
836,
8,
341,
42400,
3144,
1669,
3521,
3642,
23349,
842,
76196,
698,
60439,
37302,
1669,
3056,
1235,
341,
197,
197,
16291,
1843,
914,
198,
197,
42400,
2859,
4870,
2415,
14032,
30953,
198,
197,
594... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestStatus_SetUseI18n(t *testing.T) {
status := NewStatus(nil)
status.SetUseI18n(true)
if status.useI18n == false {
t.FailNow()
}
} | explode_data.jsonl/79122 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 63
} | [
2830,
3393,
2522,
14812,
10253,
40,
16,
23,
77,
1155,
353,
8840,
836,
8,
341,
23847,
1669,
1532,
2522,
27907,
340,
23847,
4202,
10253,
40,
16,
23,
77,
3715,
340,
743,
2639,
7397,
40,
16,
23,
77,
621,
895,
341,
197,
3244,
57243,
70... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 2 |
func TestPeerGroupResolverRandomPeers(t *testing.T) {
signedBy, identities, err := GetPolicies(org1, org2, org3, org4)
if err != nil {
panic(err)
}
sigPolicyEnv := &common.SignaturePolicyEnvelope{
Version: 0,
Rule: NewNOutOfPolicy(1,
NewNOutOfPolicy(2,
signedBy[o1],
signedBy[o2],
),
NewNOutOfPolicy(2,
signedBy[o1],
signedBy[o3],
signedBy[o4],
),
),
Identities: identities,
}
pgResolver, err := NewRandomPeerGroupResolver(sigPolicyEnv)
if err != nil {
t.Fatal(err)
}
var peers []fab.Peer
for _, peer := range allPeers {
if rand.Int31n(2) == 1 {
peers = append(peers, peer)
}
}
for i := 0; i < 100; i++ {
pgResolver.Resolve(peers)
}
} | explode_data.jsonl/21579 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 334
} | [
2830,
3393,
30888,
2808,
18190,
13999,
10197,
388,
1155,
353,
8840,
836,
8,
341,
1903,
1542,
1359,
11,
39421,
11,
1848,
1669,
2126,
47,
42038,
36246,
16,
11,
1240,
17,
11,
1240,
18,
11,
1240,
19,
340,
743,
1848,
961,
2092,
341,
197,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
func TestStructure(t *testing.T) {
f := foresttest.Create("-a-") // a <- b; c
h := &Hierarchy{Forest: f}
l := zap.Logger(false)
tests := []struct {
name string
nnm string
pnm string
fail bool
}{
{name: "ok", nnm: "a", pnm: "c"},
{name: "missing parent", nnm: "a", pnm: "brumpf", fail: true},
{name: "self-cycle", nnm: "a", pnm: "a", fail: true},
{name: "other cycle", nnm: "a", pnm: "b", fail: true},
{name: "exclude kube-system", nnm: "a", pnm: "kube-system", fail: true},
{name: "exclude kube-public", nnm: "a", pnm: "kube-public", fail: true},
{name: "exclude hnc-system", nnm: "a", pnm: "hnc-system", fail: true},
{name: "exclude cert-manager", nnm: "a", pnm: "cert-manager", fail: true},
}
for _, tc := range tests {
t.Run(tc.name, func(t *testing.T) {
// Setup
g := NewGomegaWithT(t)
hc := &api.HierarchyConfiguration{Spec: api.HierarchyConfigurationSpec{Parent: tc.pnm}}
hc.ObjectMeta.Name = api.Singleton
hc.ObjectMeta.Namespace = tc.nnm
req := &request{hc: hc}
// Test
got := h.handle(context.Background(), l, req)
// Report
logResult(t, got.AdmissionResponse.Result)
g.Expect(got.AdmissionResponse.Allowed).ShouldNot(Equal(tc.fail))
})
}
} | explode_data.jsonl/52646 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 540
} | [
2830,
3393,
22952,
1155,
353,
8840,
836,
8,
341,
1166,
1669,
13638,
1944,
7251,
13645,
64,
12,
899,
442,
264,
9119,
293,
26,
272,
198,
9598,
1669,
609,
85264,
90,
49578,
25,
282,
532,
8810,
1669,
32978,
12750,
3576,
692,
78216,
1669,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestGetReadyCondition(t *testing.T) {
tests := []struct {
name string
cmCertificate *cmv1alpha2.Certificate
want *cmv1alpha2.CertificateCondition
}{{
name: "ready",
cmCertificate: makeTestCertificate(cmmeta.ConditionTrue, "ready", "ready"),
want: &cmv1alpha2.CertificateCondition{
Type: cmv1alpha2.CertificateConditionReady,
Status: cmmeta.ConditionTrue,
Reason: "ready",
Message: "ready",
}}, {
name: "not ready",
cmCertificate: makeTestCertificate(cmmeta.ConditionFalse, "not ready", "not ready"),
want: &cmv1alpha2.CertificateCondition{
Type: cmv1alpha2.CertificateConditionReady,
Status: cmmeta.ConditionFalse,
Reason: "not ready",
Message: "not ready",
}}, {
name: "unknow",
cmCertificate: makeTestCertificate(cmmeta.ConditionUnknown, "unknown", "unknown"),
want: &cmv1alpha2.CertificateCondition{
Type: cmv1alpha2.CertificateConditionReady,
Status: cmmeta.ConditionUnknown,
Reason: "unknown",
Message: "unknown",
},
}}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
got := GetReadyCondition(test.cmCertificate)
if diff := cmp.Diff(test.want, got); diff != "" {
t.Errorf("GetReadyCondition (-want, +got) = %s", diff)
}
})
}
} | explode_data.jsonl/30878 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 541
} | [
2830,
3393,
1949,
19202,
10547,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
11609,
688,
914,
198,
197,
98316,
33202,
353,
6226,
85,
16,
7141,
17,
727,
20962,
198,
197,
50780,
688,
353,
6226,
85,
16,
7141,
17,
72... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestNewLogger(t *testing.T) {
if testing.Short() {
t.Skip("skipping syslog test during -short")
}
f, err := NewLogger(LOG_USER|LOG_INFO, 0)
if f == nil {
if err.Error() == "Unix syslog delivery error" {
t.Skip("skipping: syslogd not running")
}
t.Error(err)
}
} | explode_data.jsonl/17086 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 119
} | [
2830,
3393,
3564,
7395,
1155,
353,
8840,
836,
8,
341,
743,
7497,
55958,
368,
341,
197,
3244,
57776,
445,
4886,
5654,
74487,
1273,
2337,
481,
8676,
1138,
197,
532,
1166,
11,
1848,
1669,
1532,
7395,
24850,
9107,
91,
7243,
9068,
11,
220,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestHandleChaincodeDeployErroneousIndexFile(t *testing.T) {
channelName := "ch1"
env := NewTestVDBEnv(t)
env.Cleanup(channelName)
defer env.Cleanup(channelName)
db, err := env.DBProvider.GetDBHandle(channelName)
testutil.AssertNoError(t, err, "")
db.Open()
defer db.Close()
batch := statedb.NewUpdateBatch()
batch.Put("ns1", "key1", []byte(`{"asset_name": "marble1","color": "blue","size": 1,"owner": "tom"}`), version.NewHeight(1, 1))
batch.Put("ns1", "key2", []byte(`{"asset_name": "marble2","color": "blue","size": 2,"owner": "jerry"}`), version.NewHeight(1, 2))
// Create a tar file for test with 2 index definitions - one of them being errorneous
badSyntaxFileContent := `{"index":{"fields": This is a bad json}`
dbArtifactsTarBytes := testutil.CreateTarBytesForTest(
[]*testutil.TarFileEntry{
{"META-INF/statedb/couchdb/indexes/indexSizeSortName.json", `{"index":{"fields":[{"size":"desc"}]},"ddoc":"indexSizeSortName","name":"indexSizeSortName","type":"json"}`},
{"META-INF/statedb/couchdb/indexes/badSyntax.json", badSyntaxFileContent},
},
)
indexCapable, ok := db.(statedb.IndexCapable)
if !ok {
t.Fatalf("Couchdb state impl is expected to implement interface `statedb.IndexCapable`")
}
fileEntries, errExtract := ccprovider.ExtractFileEntries(dbArtifactsTarBytes, "couchdb")
testutil.AssertNoError(t, errExtract, "")
indexCapable.ProcessIndexesForChaincodeDeploy("ns1", fileEntries["META-INF/statedb/couchdb/indexes"])
//Sleep to allow time for index creation
time.Sleep(100 * time.Millisecond)
//Query should complete without error
_, err = db.ExecuteQuery("ns1", `{"selector":{"owner":"fred"}, "sort": [{"size": "desc"}]}`)
testutil.AssertNoError(t, err, "")
} | explode_data.jsonl/604 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 629
} | [
2830,
3393,
6999,
18837,
1851,
69464,
7747,
603,
782,
1552,
1703,
1155,
353,
8840,
836,
8,
341,
71550,
675,
1669,
330,
331,
16,
698,
57538,
1669,
1532,
2271,
53,
3506,
14359,
1155,
340,
57538,
727,
60639,
25923,
675,
340,
16867,
6105,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestSessionsController_Create_ReapSessions(t *testing.T) {
t.Parallel()
app := cltest.NewApplicationEVMDisabled(t)
require.NoError(t, app.Start())
staleSession := cltest.NewSession()
staleSession.LastUsed = time.Now().Add(-cltest.MustParseDuration(t, "241h"))
q := pg.NewQ(app.GetSqlxDB(), app.GetLogger(), app.GetConfig())
mustInsertSession(t, q, &staleSession)
body := fmt.Sprintf(`{"email":"%s","password":"%s"}`, cltest.APIEmail, cltest.Password)
resp, err := http.Post(app.Config.ClientNodeURL()+"/sessions", "application/json", bytes.NewBufferString(body))
assert.NoError(t, err)
defer resp.Body.Close()
assert.Equal(t, http.StatusOK, resp.StatusCode)
var s []sessions.Session
gomega.NewWithT(t).Eventually(func() []sessions.Session {
s, err = app.SessionORM().Sessions(0, 10)
assert.NoError(t, err)
return s
}).Should(gomega.HaveLen(1))
for _, session := range s {
assert.NotEqual(t, session.ID, staleSession.ID)
}
} | explode_data.jsonl/12809 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 369
} | [
2830,
3393,
59062,
2051,
34325,
50693,
391,
59062,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
28236,
1669,
1185,
1944,
7121,
4988,
36,
11187,
25907,
1155,
340,
17957,
35699,
1155,
11,
906,
12101,
12367,
18388,
1574,
5283,
1669... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func Test_signature_Unmarshal(t *testing.T) {
t.Parallel()
type (
testCase struct {
name string
blob []byte
want Signature
wantErr bool
}
testList []testCase
)
algos := GetAlgos()
tests := make(testList, 0, algos.Len()+1)
for name, algo := range algos {
sign, _ := mockSignature(algo)
blob, _ := sign.Marshal()
tests = append(tests, testCase{
name: name + "_OK",
blob: blob,
want: sign,
})
}
tests = append(tests, testCase{
name: "ERR",
blob: []byte(":"), // invalid data
wantErr: true,
})
for idx := range tests {
test := tests[idx]
t.Run(test.name, func(t *testing.T) {
t.Parallel()
got := NewSignature(nil)
if err := got.Unmarshal(test.blob); (err != nil) != test.wantErr {
t.Errorf("Unmarshal() error: %v | want: %v", err, test.wantErr)
}
if !got.Equals(test.want) {
t.Errorf("Unmarshal() got: %#v | want: %#v", got, test.want)
}
})
}
} | explode_data.jsonl/21348 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 449
} | [
2830,
3393,
39859,
40687,
27121,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
13158,
2399,
197,
18185,
4207,
2036,
341,
298,
11609,
262,
914,
198,
298,
2233,
1684,
262,
3056,
3782,
198,
298,
50780,
262,
32232,
198,
298,
50780,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestTimeoutOnNoServers(t *testing.T) {
s1 := RunServerOnPort(1222)
defer s1.Shutdown()
opts := nats.DefaultOptions
if runtime.GOOS == "windows" {
opts.Servers = testServers[:2]
opts.MaxReconnect = 2
opts.ReconnectWait = (100 * time.Millisecond)
} else {
opts.Servers = testServers
// 1 second total time wait
opts.MaxReconnect = 10
opts.ReconnectWait = (100 * time.Millisecond)
}
opts.NoRandomize = true
dch := make(chan bool)
opts.DisconnectedCB = func(nc *nats.Conn) {
// Suppress any additional calls
nc.SetDisconnectHandler(nil)
dch <- true
}
cch := make(chan bool)
opts.ClosedCB = func(_ *nats.Conn) {
cch <- true
}
nc, err := opts.Connect()
if err != nil {
t.Fatalf("Expected to connect, got err: %v\n", err)
}
defer nc.Close()
s1.Shutdown()
// On Windows, creating a connection to a non-running server takes
// more than a second. So be generous with WaitTime
// wait for disconnect
if e := WaitTime(dch, 5*time.Second); e != nil {
t.Fatal("Did not receive a disconnect callback message")
}
startWait := time.Now()
// Wait for ClosedCB
if e := WaitTime(cch, 5*time.Second); e != nil {
t.Fatal("Did not receive a closed callback message")
}
if runtime.GOOS != "windows" {
timeWait := time.Since(startWait)
// Use 500ms as variable time delta
variable := (500 * time.Millisecond)
expected := (time.Duration(opts.MaxReconnect) * opts.ReconnectWait)
if timeWait > (expected + variable) {
t.Fatalf("Waited too long for Closed state: %d\n", timeWait/time.Millisecond)
}
}
} | explode_data.jsonl/1557 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 600
} | [
2830,
3393,
7636,
1925,
2753,
78139,
1155,
353,
8840,
836,
8,
341,
1903,
16,
1669,
6452,
5475,
1925,
7084,
7,
16,
17,
17,
17,
340,
16867,
274,
16,
10849,
18452,
2822,
64734,
1669,
308,
1862,
13275,
3798,
198,
743,
15592,
97574,
3126,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.