text stringlengths 93 16.4k | id stringlengths 20 40 | metadata dict | input_ids listlengths 45 2.05k | attention_mask listlengths 45 2.05k | complexity int64 1 9 |
|---|---|---|---|---|---|
func TestModifiedStatFails(t *testing.T) {
for _, alg := range hashAlgs {
vfsObj, root, ctx, err := newVerityRoot(t, alg)
if err != nil {
t.Fatalf("newVerityRoot: %v", err)
}
filename := "verity-test-file"
fd, _, err := newFileFD(ctx, t, vfsObj, root, filename, 0644)
if err != nil {
t.Fatalf("newFileFD: %v", err)
}
// Enable verity on the file.
enableVerity(ctx, t, fd)
lowerFD := fd.Impl().(*fileDescription).lowerFD
// Change the stat of the underlying file, and check that stat fails.
if err := lowerFD.SetStat(ctx, vfs.SetStatOptions{
Stat: linux.Statx{
Mask: uint32(linux.STATX_MODE),
Mode: 0777,
},
}); err != nil {
t.Fatalf("lowerFD.SetStat: %v", err)
}
if _, err := fd.Stat(ctx, vfs.StatOptions{}); err == nil {
t.Errorf("fd.Stat succeeded when it should fail")
}
}
} | explode_data.jsonl/56766 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 363
} | [
2830,
3393,
19148,
15878,
37,
6209,
1155,
353,
8840,
836,
8,
341,
2023,
8358,
17345,
1669,
2088,
5175,
2101,
5857,
341,
197,
5195,
3848,
5261,
11,
3704,
11,
5635,
11,
1848,
1669,
501,
10141,
487,
8439,
1155,
11,
17345,
340,
197,
743,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
func TestSetExternalTagInvalidTagsList(t *testing.T) {
// Reset memory counters
helpers.ResetMemoryStats()
code := `
tags = [
('hostname', {'source_type': {'tag1': 'tag2'}}),
('hostname2', {'source_type2': ['tag3', 'tag4']}),
]
datadog_agent.set_external_tags(tags)
`
out, err := run(code)
if err != nil {
t.Fatal(err)
}
if out != "TypeError: dict value must be a list of tags" {
t.Errorf("Unexpected printed value: '%s'", out)
}
// Check for leaks
helpers.AssertMemoryUsage(t)
} | explode_data.jsonl/24557 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 205
} | [
2830,
3393,
1649,
25913,
5668,
7928,
15930,
852,
1155,
353,
8840,
836,
8,
341,
197,
322,
16932,
4938,
31532,
198,
197,
21723,
36660,
10642,
16635,
2822,
43343,
1669,
22074,
3244,
2032,
284,
2278,
197,
197,
492,
27806,
516,
5360,
2427,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestLogUnparsedMessageError(t *testing.T) {
consumer := constructTestConsumer()
originalMessage := sarama.ConsumerMessage{}
// try to call the tested function and capture its output
output, err := capture.ErrorOutput(func() {
log.Logger = log.Output(zerolog.New(os.Stderr))
main.LogUnparsedMessageError(consumer, &originalMessage, testEventMessage, errors.New(testError))
})
// check the captured text
checkCapture(t, err)
assert.Contains(t, output, testTopicName)
assert.Contains(t, output, testError)
assert.Contains(t, output, testEventMessage)
} | explode_data.jsonl/45133 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 182
} | [
2830,
3393,
2201,
1806,
41030,
2052,
1454,
1155,
353,
8840,
836,
8,
341,
37203,
11761,
1669,
9245,
2271,
29968,
741,
197,
9889,
2052,
1669,
274,
637,
64,
70471,
2052,
31483,
197,
322,
1430,
311,
1618,
279,
12510,
729,
323,
12322,
1181,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestSelectChar(t *testing.T) {
t.Parallel()
tableName := tableName()
if _, err := testDb.Exec("CREATE TABLE " + tableName + "(c1 CHAR(1), c2 CHAR(4))"); err != nil {
t.Fatal(err)
}
testSes := getSes(t)
defer testSes.Close()
if _, err := testSes.PrepAndExe("INSERT INTO "+tableName+" VALUES (:1, :2)",
"A", "ABCD"); err != nil {
t.Fatal(err)
}
got := make([]interface{}, 0, 2)
for tN, tC := range []struct {
colDefs []ora.GoColumnType
want []interface{}
}{
{[]ora.GoColumnType{ora.B, ora.B}, []interface{}{false, false}},
{[]ora.GoColumnType{ora.S, ora.S}, []interface{}{"A", "ABCD"}},
{nil, []interface{}{"A", "ABCD"}},
} {
stmt, err := testSes.Prep("SELECT c1, c2 FROM "+tableName, tC.colDefs...)
if err != nil {
t.Fatal(err)
}
defer stmt.Close()
rset, err := stmt.Qry()
if err != nil {
t.Fatal(err)
}
got = got[:0]
rset.Next()
got = append(got, rset.Row[0], rset.Row[1])
t.Logf("%d. got %q, want %q.", tN, got, tC.want)
if len(got) != len(tC.want) || got[0] != tC.want[0] || got[1] != tC.want[1] {
t.Errorf("%d. got %q, want %q.", tN, got, tC.want)
}
}
} | explode_data.jsonl/48074 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 550
} | [
2830,
3393,
3379,
4768,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
26481,
675,
1669,
29544,
741,
743,
8358,
1848,
1669,
1273,
7994,
30798,
445,
22599,
14363,
330,
488,
29544,
488,
11993,
66,
16,
23997,
7,
16,
701,
272,
17,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 9 |
func TestUpdate(t *testing.T) {
nodeNum := 5
bootPeers := []string{bootPeer(6611), bootPeer(6612)}
instances := []*gossipInstance{}
inst := createDiscoveryInstance(6611, "d1", bootPeers)
instances = append(instances, inst)
inst = createDiscoveryInstance(6612, "d2", bootPeers)
instances = append(instances, inst)
for i := 3; i <= nodeNum; i++ {
id := fmt.Sprintf("d%d", i)
inst = createDiscoveryInstance(6610+i, id, bootPeers)
instances = append(instances, inst)
}
fullMembership := func() bool {
return nodeNum-1 == len(instances[nodeNum-1].GetMembership())
}
waitUntilOrFail(t, fullMembership)
instances[0].UpdateMetadata([]byte("bla bla"))
instances[nodeNum-1].UpdateEndpoint("localhost:5511")
checkMembership := func() bool {
for _, member := range instances[nodeNum-1].GetMembership() {
if string(member.PKIid) == instances[0].comm.id {
if "bla bla" != string(member.Metadata) {
return false
}
}
}
for _, member := range instances[0].GetMembership() {
if string(member.PKIid) == instances[nodeNum-1].comm.id {
if "localhost:5511" != string(member.Endpoint) {
return false
}
}
}
return true
}
waitUntilOrFail(t, checkMembership)
stopInstances(t, instances)
} | explode_data.jsonl/62261 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 488
} | [
2830,
3393,
4289,
1155,
353,
8840,
836,
8,
341,
20831,
4651,
1669,
220,
20,
198,
197,
4619,
10197,
388,
1669,
3056,
917,
90,
4619,
30888,
7,
21,
21,
16,
16,
701,
10459,
30888,
7,
21,
21,
16,
17,
10569,
197,
47825,
1669,
29838,
70,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestInstallRelease_TillerVersion(t *testing.T) {
version.Version = "2.2.0"
c := helm.NewContext()
rs := rsFixture()
// TODO: Refactor this into a mock.
req := &services.InstallReleaseRequest{
Namespace: "spaced",
Chart: &chart.Chart{
Metadata: &chart.Metadata{Name: "hello", TillerVersion: ">=2.2.0"},
Templates: []*chart.Template{
{Name: "templates/hello", Data: []byte("hello: world")},
{Name: "templates/hooks", Data: []byte(manifestWithHook)},
},
},
}
_, err := rs.InstallRelease(c, req)
if err != nil {
t.Fatalf("Expected valid range. Got %q", err)
}
} | explode_data.jsonl/45607 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 243
} | [
2830,
3393,
24690,
16077,
1139,
15252,
5637,
1155,
353,
8840,
836,
8,
341,
74954,
35842,
284,
330,
17,
13,
17,
13,
15,
698,
1444,
1669,
33765,
7121,
1972,
741,
41231,
1669,
10036,
18930,
2822,
197,
322,
5343,
25,
8550,
5621,
419,
1119... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestExpandFileSourceWithKey(t *testing.T) {
fakeFS := fs.MakeFakeFS()
fakeFS.Create("dir/faaaaaaaaaabbbbbbbbbccccccccccccccccc")
fakeFS.Create("dir/foobar")
fakeFS.Create("dir/simplebar")
fakeFS.Create("dir/readme")
fa := flagsAndArgs{
FileSources: []string{"foo-key=dir/fa*", "bar-key=dir/foobar", "dir/simplebar"},
}
fa.ExpandFileSource(fakeFS)
expected := []string{
"foo-key=dir/faaaaaaaaaabbbbbbbbbccccccccccccccccc",
"bar-key=dir/foobar",
"dir/simplebar",
}
if !reflect.DeepEqual(fa.FileSources, expected) {
t.Fatalf("FileSources is not correctly expanded: %v", fa.FileSources)
}
} | explode_data.jsonl/53905 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 255
} | [
2830,
3393,
38946,
1703,
3608,
2354,
1592,
1155,
353,
8840,
836,
8,
341,
1166,
726,
8485,
1669,
8619,
50133,
52317,
8485,
741,
1166,
726,
8485,
7251,
445,
3741,
87562,
69440,
370,
87609,
87609,
55597,
55597,
55597,
638,
37054,
1138,
1166,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestGetFileContentTypeMP4(t *testing.T) {
file := `../testdata/files/test7.mp4`
fileType, err := GetFileContentType(file)
if err != nil {
t.Log("Error -> ", err)
t.Fail()
}
if fileType != "video/mp4" {
t.Log(fileType)
t.Fail()
}
file = `../testdata/files/test8.mp4`
fileType, err = GetFileContentType(file)
if err != nil {
t.Log("Error -> ", err)
t.Fail()
}
if fileType != "video/mp4" {
t.Log(fileType)
t.Fail()
}
} | explode_data.jsonl/24012 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 204
} | [
2830,
3393,
1949,
1703,
29504,
5781,
19,
1155,
353,
8840,
836,
8,
341,
17661,
1669,
1565,
1244,
92425,
33220,
12697,
22,
16870,
19,
3989,
17661,
929,
11,
1848,
1669,
2126,
1703,
29504,
4866,
692,
743,
1848,
961,
2092,
341,
197,
3244,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestCASignPathlen(t *testing.T) {
var csrPathlenTests = []struct {
name string
caCertFile string
caKeyFile string
caProfile bool
csrFile string
err error
pathlen int
isZero bool
isCA bool
}{
{
name: "pathlen 1 signing pathlen 0",
caCertFile: testECDSACaFile,
caKeyFile: testECDSACaKeyFile,
caProfile: true,
csrFile: "testdata/inter_pathlen_0.csr",
err: nil,
pathlen: 0,
isZero: true,
isCA: true,
},
{
name: "pathlen 1 signing pathlen 1",
caCertFile: testECDSACaFile,
caKeyFile: testECDSACaKeyFile,
caProfile: true,
csrFile: "testdata/inter_pathlen_1.csr",
err: cferr.New(cferr.PolicyError, cferr.InvalidRequest),
},
{
name: "pathlen 0 signing pathlen 0",
caCertFile: testCaFile,
caKeyFile: testCaKeyFile,
caProfile: true,
csrFile: "testdata/inter_pathlen_0.csr",
err: cferr.New(cferr.PolicyError, cferr.InvalidRequest),
},
{
name: "pathlen 0 signing pathlen 1",
caCertFile: testCaFile,
caKeyFile: testCaKeyFile,
caProfile: true,
csrFile: "testdata/inter_pathlen_1.csr",
err: cferr.New(cferr.PolicyError, cferr.InvalidRequest),
},
{
name: "pathlen 0 signing pathlen unspecified",
caCertFile: testCaFile,
caKeyFile: testCaKeyFile,
caProfile: true,
csrFile: "testdata/inter_pathlen_unspecified.csr",
err: cferr.New(cferr.PolicyError, cferr.InvalidRequest),
},
{
name: "pathlen 1 signing unspecified pathlen",
caCertFile: testECDSACaFile,
caKeyFile: testECDSACaKeyFile,
caProfile: true,
csrFile: "testdata/inter_pathlen_unspecified.csr",
err: nil,
// golang x509 parses unspecified pathlen as MaxPathLen == -1 and
// MaxPathLenZero == false
pathlen: -1,
isZero: false,
isCA: true,
},
{
name: "non-ca singing profile signing pathlen 0",
caCertFile: testECDSACaFile,
caKeyFile: testECDSACaKeyFile,
caProfile: false,
csrFile: "testdata/inter_pathlen_0.csr",
err: cferr.New(cferr.PolicyError, cferr.InvalidRequest),
},
{
name: "non-ca singing profile signing pathlen 1",
caCertFile: testECDSACaFile,
caKeyFile: testECDSACaKeyFile,
caProfile: false,
csrFile: "testdata/inter_pathlen_1.csr",
err: cferr.New(cferr.PolicyError, cferr.InvalidRequest),
},
{
name: "non-ca singing profile signing pathlen 0",
caCertFile: testECDSACaFile,
caKeyFile: testECDSACaKeyFile,
caProfile: false,
csrFile: "testdata/inter_pathlen_unspecified.csr",
err: cferr.New(cferr.PolicyError, cferr.InvalidRequest),
},
}
for _, testCase := range csrPathlenTests {
csrPEM, err := ioutil.ReadFile(testCase.csrFile)
if err != nil {
t.Fatalf("%v", err)
}
req := &signer.Subject{
Names: []csr.Name{
{O: "sam certificate authority"},
},
CN: "localhost",
}
s := newCustomSigner(t, testCase.caCertFile, testCase.caKeyFile)
// No policy CSR whitelist: the normal set of CSR fields get passed through to
// certificate.
s.policy = &config.Signing{
Default: &config.SigningProfile{
Usage: []string{"cert sign", "crl sign"},
ExpiryString: "1h",
Expiry: 1 * time.Hour,
CAConstraint: config.CAConstraint{IsCA: testCase.caProfile,
MaxPathLen: testCase.pathlen,
MaxPathLenZero: testCase.isZero,
},
},
}
request := signer.SignRequest{
Hosts: []string{"127.0.0.1", "localhost"},
Request: string(csrPEM),
Subject: req,
}
certPEM, err := s.Sign(request)
if !reflect.DeepEqual(err, testCase.err) {
t.Fatalf("%s: expected: %v, actual: %v", testCase.name, testCase.err, err)
}
if err == nil {
cert, err := helpers.ParseCertificatePEM(certPEM)
if err != nil {
t.Fatalf("%s: %v", testCase.name, err)
}
if cert.IsCA != testCase.isCA {
t.Fatalf("%s: unexpected IsCA value: %v", testCase.name, cert.IsCA)
}
if cert.MaxPathLen != testCase.pathlen {
t.Fatalf("%s: unexpected pathlen value: %v", testCase.name, cert.MaxPathLen)
}
if cert.MaxPathLenZero != testCase.isZero {
t.Fatalf("%s: unexpected pathlen value: %v", testCase.name, cert.MaxPathLenZero)
}
}
}
} | explode_data.jsonl/71286 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 2049
} | [
2830,
3393,
87516,
622,
1820,
2892,
1155,
353,
8840,
836,
8,
341,
2405,
68932,
1820,
2892,
18200,
284,
3056,
1235,
341,
197,
11609,
981,
914,
198,
197,
197,
924,
36934,
1703,
914,
198,
197,
197,
924,
1592,
1703,
220,
914,
198,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 9 |
func Test_LoadDatastore(t *testing.T) {
testConfig, _ = config.LoadConfig(testConfigFile)
testConfig.Datastore.TypeConfig["mapping_file"] = testEssMapFile
ds, err := LoadDatastore(&testConfig.Datastore, simplelog.NewStdLogger())
if err != nil {
t.Fatalf("%s", err)
}
t.Logf("%v", ds)
} | explode_data.jsonl/44269 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 118
} | [
2830,
3393,
19553,
1043,
4314,
1155,
353,
8840,
836,
8,
341,
18185,
2648,
11,
716,
284,
2193,
13969,
2648,
8623,
2648,
1703,
340,
18185,
2648,
3336,
4314,
10184,
2648,
1183,
40792,
2458,
1341,
284,
1273,
37438,
2227,
1703,
271,
83336,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestDeleteOperatorScenarios(t *testing.T) {
for _, tt := range deleteOperatorScenarios {
testScenario(t, &tt)
}
documentScenarios(t, "Delete", deleteOperatorScenarios)
} | explode_data.jsonl/36683 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 68
} | [
2830,
3393,
6435,
18461,
3326,
60494,
1155,
353,
8840,
836,
8,
341,
2023,
8358,
17853,
1669,
2088,
3698,
18461,
3326,
60494,
341,
197,
18185,
54031,
1155,
11,
609,
5566,
340,
197,
532,
17470,
3326,
60494,
1155,
11,
330,
6435,
497,
3698,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 2 |
func TestCustomer_GetBalance(t *testing.T) {
key := "test api key"
mockResponse := new(invdendpoint.Balance)
mockResponse.TotalOutstanding = 1
server, err := invdmockserver.New(200, mockResponse, "json", true)
if err != nil {
t.Fatal(err)
}
defer server.Close()
conn := mockConnection(key, server)
entity := conn.NewCustomer()
retrievedItem, err := entity.GetBalance()
if err != nil {
t.Fatal("Error retrieving entity", err)
}
if retrievedItem.TotalOutstanding != 1 {
t.Fatal("Error messages do not match up")
}
} | explode_data.jsonl/15010 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 189
} | [
2830,
3393,
12792,
13614,
21190,
1155,
353,
8840,
836,
8,
341,
23634,
1669,
330,
1944,
6330,
1376,
1837,
77333,
2582,
1669,
501,
5900,
16598,
32540,
1785,
4978,
340,
77333,
2582,
35997,
2662,
10070,
284,
220,
16,
271,
41057,
11,
1848,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestGetTeamByName(t *testing.T) {
th := Setup(t).InitBasic()
defer th.TearDown()
team := th.BasicTeam
th.TestForAllClients(t, func(t *testing.T, client *model.Client4) {
rteam, resp := client.GetTeamByName(team.Name, "")
CheckNoError(t, resp)
require.Equal(t, rteam.Name, team.Name, "wrong team")
_, resp = client.GetTeamByName("junk", "")
CheckNotFoundStatus(t, resp)
_, resp = client.GetTeamByName("", "")
CheckNotFoundStatus(t, resp)
})
th.TestForSystemAdminAndLocal(t, func(t *testing.T, client *model.Client4) {
_, resp := client.GetTeamByName(strings.ToUpper(team.Name), "")
CheckNoError(t, resp)
})
th.Client.Logout()
_, resp := th.Client.GetTeamByName(team.Name, "")
CheckUnauthorizedStatus(t, resp)
_, resp = th.SystemAdminClient.GetTeamByName(team.Name, "")
CheckNoError(t, resp)
th.LoginTeamAdmin()
team2 := &model.Team{DisplayName: "Name", Name: GenerateTestTeamName(), Email: th.GenerateTestEmail(), Type: model.TEAM_OPEN, AllowOpenInvite: false}
rteam2, _ := th.Client.CreateTeam(team2)
team3 := &model.Team{DisplayName: "Name", Name: GenerateTestTeamName(), Email: th.GenerateTestEmail(), Type: model.TEAM_INVITE, AllowOpenInvite: true}
rteam3, _ := th.Client.CreateTeam(team3)
th.LoginBasic()
// AllowInviteOpen is false and team is open, and user is not on team
_, resp = th.Client.GetTeamByName(rteam2.Name, "")
CheckForbiddenStatus(t, resp)
// AllowInviteOpen is true and team is invite only, and user is not on team
_, resp = th.Client.GetTeamByName(rteam3.Name, "")
CheckForbiddenStatus(t, resp)
} | explode_data.jsonl/70715 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 597
} | [
2830,
3393,
1949,
14597,
16898,
1155,
353,
8840,
836,
8,
341,
70479,
1669,
18626,
1155,
568,
3803,
15944,
741,
16867,
270,
836,
682,
4454,
741,
197,
9196,
1669,
270,
48868,
14597,
271,
70479,
8787,
2461,
2403,
47174,
1155,
11,
2915,
115... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestPopUp_Move(t *testing.T) {
label := NewLabel("Hi")
win := test.NewWindow(NewLabel("OK"))
win.Resize(fyne.NewSize(50, 50))
pop := NewPopUp(label, win.Canvas())
pos := fyne.NewPos(10, 10)
pop.Move(pos)
innerPos := pop.Content.Position()
assert.Equal(t, pos.X+theme.Padding(), innerPos.X)
assert.Equal(t, pos.Y+theme.Padding(), innerPos.Y)
popPos := pop.Position()
assert.Equal(t, 0, popPos.X) // these are 0 as the popUp must fill our overlay
assert.Equal(t, 0, popPos.Y)
} | explode_data.jsonl/65215 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 203
} | [
2830,
3393,
11598,
2324,
66352,
1155,
353,
8840,
836,
8,
341,
29277,
1669,
1532,
2476,
445,
13048,
1138,
68452,
1669,
1273,
7121,
4267,
35063,
2476,
445,
3925,
5455,
68452,
77707,
955,
81708,
7121,
1695,
7,
20,
15,
11,
220,
20,
15,
11... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestRemovePKCS7Padding(t *testing.T) {
type args struct {
s string
blockSize int
}
tests := []struct {
name string
args args
want bool
want1 string
}{
{"HasPadding", args{"YELLOW\x04\x04\x04\x04", 10}, true, "YELLOW"},
{"NoPadding", args{"YELLOW\x04\x04\x04", 10}, false, "YELLOW\x04\x04\x04"},
{"NotAtBlockSize", args{"YELLOW\x03\x03\x03", 10}, false, "YELLOW\x03\x03\x03"},
{"LastByteZero", args{"YELLOW\x03\x03\x00", 10}, false, "YELLOW\x03\x03\x00"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, got1 := RemovePKCS7Padding(tt.args.s, tt.args.blockSize)
if got != tt.want {
t.Errorf("RemovePKCS7Padding() got = %v, want %v", got, tt.want)
}
if got1 != tt.want1 {
t.Errorf("RemovePKCS7Padding() got1 = %v, want %v", got1, tt.want1)
}
})
}
} | explode_data.jsonl/66880 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 415
} | [
2830,
3393,
13021,
22242,
6412,
22,
21616,
1155,
353,
8840,
836,
8,
341,
13158,
2827,
2036,
341,
197,
1903,
260,
914,
198,
197,
47996,
1695,
526,
198,
197,
532,
78216,
1669,
3056,
1235,
341,
197,
11609,
220,
914,
198,
197,
31215,
220,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestInterfaceExtraction(t *testing.T) {
var s struct {
W io.Writer
}
s.W = os.Stdout
v := Indirect(ValueOf(&s)).Field(0).Interface()
if v != s.W.(interface{}) {
t.Error("Interface() on interface: ", v, s.W)
}
} | explode_data.jsonl/29545 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 95
} | [
2830,
3393,
5051,
840,
26425,
1155,
353,
8840,
836,
8,
341,
2405,
274,
2036,
341,
197,
17300,
6399,
47838,
198,
197,
630,
1903,
1175,
284,
2643,
83225,
198,
5195,
1669,
2263,
1226,
25346,
2124,
2099,
82,
4579,
1877,
7,
15,
568,
5051,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestManageData(t *testing.T) {
kp0 := newKeypair0()
sourceAccount := NewSimpleAccount(kp0.Address(), int64(3556091187167235))
manageData := ManageData{
Name: "Fruit preference",
Value: []byte("Apple"),
}
received, err := newSignedTransaction(
TransactionParams{
SourceAccount: &sourceAccount,
IncrementSequenceNum: true,
Operations: []Operation{&manageData},
BaseFee: MinBaseFee,
Timebounds: NewInfiniteTimeout(),
},
network.TestNetworkPassphrase,
kp0,
)
assert.NoError(t, err)
expected := "AAAAAgAAAADg3G3hclysZlFitS+s5zWyiiJD5B0STWy5LXCj6i5yxQAAAGQADKI/AAAABAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAACgAAABBGcnVpdCBwcmVmZXJlbmNlAAAAAQAAAAVBcHBsZQAAAAAAAAAAAAAB6i5yxQAAAEDtRCyQRKKgQ8iLEu7kicHtSzoplfxPtPTMhdRv/sq8UoIBVTxIw+S13Jv+jzs3tyLDLiGCVNXreUNlbfX+980K"
assert.Equal(t, expected, received, "Base 64 XDR should match")
} | explode_data.jsonl/20662 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 428
} | [
2830,
3393,
33076,
1043,
1155,
353,
8840,
836,
8,
341,
16463,
79,
15,
1669,
501,
6608,
1082,
1310,
15,
741,
47418,
7365,
1669,
1532,
16374,
7365,
5969,
79,
15,
26979,
1507,
526,
21,
19,
7,
18,
20,
20,
21,
15,
24,
16,
16,
23,
22,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestMatcherWildcards(t *testing.T) {
matcher := NewMatcher(testSectionSize, [][][]byte{
{common.Address{}.Bytes(), common.Address{0x01}.Bytes()}, // Default address is not a wildcard
{common.Hash{}.Bytes(), common.Hash{0x01}.Bytes()}, // Default hash is not a wildcard
{common.Hash{0x01}.Bytes()}, // Plain rule, sanity check
{common.Hash{0x01}.Bytes(), nil}, // Wildcard suffix, drop rule
{nil, common.Hash{0x01}.Bytes()}, // Wildcard prefix, drop rule
{nil, nil}, // Wildcard combo, drop rule
{}, // Inited wildcard rule, drop rule
nil, // Proper wildcard rule, drop rule
})
if len(matcher.filters) != 3 {
t.Fatalf("filter system size mismatch: have %d, want %d", len(matcher.filters), 3)
}
if len(matcher.filters[0]) != 2 {
t.Fatalf("address clause size mismatch: have %d, want %d", len(matcher.filters[0]), 2)
}
if len(matcher.filters[1]) != 2 {
t.Fatalf("combo topic clause size mismatch: have %d, want %d", len(matcher.filters[1]), 2)
}
if len(matcher.filters[2]) != 1 {
t.Fatalf("singletone topic clause size mismatch: have %d, want %d", len(matcher.filters[2]), 1)
}
} | explode_data.jsonl/51279 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 653
} | [
2830,
3393,
37554,
40603,
25024,
1155,
353,
8840,
836,
8,
341,
2109,
28058,
1669,
1532,
37554,
8623,
9620,
1695,
11,
3056,
16613,
3782,
515,
197,
197,
90,
5464,
26979,
46391,
7078,
1507,
4185,
26979,
90,
15,
87,
15,
16,
7810,
7078,
76... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestExportWildcardFSNodeES6(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
export * from 'fs'
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatESModule,
AbsOutputFile: "/out.js",
Platform: config.PlatformNode,
},
})
} | explode_data.jsonl/38503 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 173
} | [
2830,
3393,
16894,
92988,
8485,
1955,
1570,
21,
1155,
353,
8840,
836,
8,
341,
11940,
57239,
25952,
33,
1241,
832,
1155,
11,
51450,
515,
197,
74075,
25,
2415,
14032,
30953,
515,
298,
197,
3115,
4085,
2857,
788,
22074,
571,
59440,
353,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestCloudHypervisorAddVSock(t *testing.T) {
assert := assert.New(t)
clh := cloudHypervisor{}
clh.addVSock(1, "path")
assert.Equal(clh.vmconfig.Vsock.Cid, int64(1))
assert.Equal(clh.vmconfig.Vsock.Socket, "path")
} | explode_data.jsonl/68494 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 97
} | [
2830,
3393,
16055,
39,
1082,
31396,
2212,
26050,
1176,
1155,
353,
8840,
836,
8,
341,
6948,
1669,
2060,
7121,
1155,
340,
39407,
71,
1669,
9437,
39,
1082,
31396,
31483,
39407,
71,
1364,
26050,
1176,
7,
16,
11,
330,
2343,
1138,
6948,
128... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestParseErrors(t *testing.T) {
tests := []struct {
in string
wantErr bool
}{
{"http://[::1]", false},
{"http://[::1]:80", false},
{"http://[::1]:namedport", true}, // rfc3986 3.2.3
{"http://x:namedport", true}, // rfc3986 3.2.3
{"http://[::1]/", false},
{"http://[::1]a", true},
{"http://[::1]%23", true},
{"http://[::1%25en0]", false}, // valid zone id
{"http://[::1]:", false}, // colon, but no port OK
{"http://x:", false}, // colon, but no port OK
{"http://[::1]:%38%30", true}, // not allowed: % encoding only for non-ASCII
{"http://[::1%25%41]", false}, // RFC 6874 allows over-escaping in zone
{"http://[%10::1]", true}, // no %xx escapes in IP address
{"http://[::1]/%48", false}, // %xx in path is fine
{"http://%41:8080/", true}, // not allowed: % encoding only for non-ASCII
{"mysql://x@y(z:123)/foo", true}, // not well-formed per RFC 3986, golang.org/issue/33646
{"mysql://x@y(1.2.3.4:123)/foo", true},
{" http://foo.com", true}, // invalid character in schema
{"ht tp://foo.com", true}, // invalid character in schema
{"ahttp://foo.com", false}, // valid schema characters
{"1http://foo.com", true}, // invalid character in schema
{"http://[]%20%48%54%54%50%2f%31%2e%31%0a%4d%79%48%65%61%64%65%72%3a%20%31%32%33%0a%0a/", true}, // golang.org/issue/11208
{"http://a b.com/", true}, // no space in host name please
{"cache_object://foo", true}, // scheme cannot have _, relative path cannot have : in first segment
{"cache_object:foo", true},
{"cache_object:foo/bar", true},
{"cache_object/:foo/bar", false},
}
for _, tt := range tests {
u, err := Parse(tt.in)
if tt.wantErr {
if err == nil {
t.Errorf("Parse(%q) = %#v; want an error", tt.in, u)
}
continue
}
if err != nil {
t.Errorf("Parse(%q) = %v; want no error", tt.in, err)
}
}
} | explode_data.jsonl/71731 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 846
} | [
2830,
3393,
14463,
13877,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
17430,
414,
914,
198,
197,
50780,
7747,
1807,
198,
197,
59403,
197,
197,
4913,
1254,
1110,
58,
486,
16,
19076,
895,
1583,
197,
197,
4913,
1254,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestProbe(t *testing.T) {
comm1, port1 := newCommInstance(t, naiveSec)
defer comm1.Stop()
comm2, port2 := newCommInstance(t, naiveSec)
time.Sleep(time.Duration(1) * time.Second)
require.NoError(t, comm1.Probe(remotePeer(port2)))
_, err := comm1.Handshake(remotePeer(port2))
require.NoError(t, err)
tempPort, _, ll := getAvailablePort(t)
defer ll.Close()
require.Error(t, comm1.Probe(remotePeer(tempPort)))
_, err = comm1.Handshake(remotePeer(tempPort))
require.Error(t, err)
comm2.Stop()
time.Sleep(time.Duration(1) * time.Second)
require.Error(t, comm1.Probe(remotePeer(port2)))
_, err = comm1.Handshake(remotePeer(port2))
require.Error(t, err)
comm2, port2 = newCommInstance(t, naiveSec)
defer comm2.Stop()
time.Sleep(time.Duration(1) * time.Second)
require.NoError(t, comm2.Probe(remotePeer(port1)))
_, err = comm2.Handshake(remotePeer(port1))
require.NoError(t, err)
require.NoError(t, comm1.Probe(remotePeer(port2)))
_, err = comm1.Handshake(remotePeer(port2))
require.NoError(t, err)
// Now try a deep probe with an expected PKI-ID that doesn't match
wrongRemotePeer := remotePeer(port2)
if wrongRemotePeer.PKIID[0] == 0 {
wrongRemotePeer.PKIID[0] = 1
} else {
wrongRemotePeer.PKIID[0] = 0
}
_, err = comm1.Handshake(wrongRemotePeer)
require.Error(t, err)
// Try a deep probe with a nil PKI-ID
endpoint := fmt.Sprintf("127.0.0.1:%d", port2)
id, err := comm1.Handshake(&RemotePeer{Endpoint: endpoint})
require.NoError(t, err)
require.Equal(t, api.PeerIdentityType(endpoint), id)
} | explode_data.jsonl/42173 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 629
} | [
2830,
3393,
81426,
1155,
353,
8840,
836,
8,
341,
197,
3621,
16,
11,
2635,
16,
1669,
501,
17977,
2523,
1155,
11,
49665,
8430,
340,
16867,
1063,
16,
30213,
741,
197,
3621,
17,
11,
2635,
17,
1669,
501,
17977,
2523,
1155,
11,
49665,
843... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestHistogramDataPointSlice_CopyTo(t *testing.T) {
dest := NewHistogramDataPointSlice()
// Test CopyTo to empty
NewHistogramDataPointSlice().CopyTo(dest)
assert.EqualValues(t, NewHistogramDataPointSlice(), dest)
// Test CopyTo larger slice
generateTestHistogramDataPointSlice().CopyTo(dest)
assert.EqualValues(t, generateTestHistogramDataPointSlice(), dest)
// Test CopyTo same size slice
generateTestHistogramDataPointSlice().CopyTo(dest)
assert.EqualValues(t, generateTestHistogramDataPointSlice(), dest)
} | explode_data.jsonl/19545 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 169
} | [
2830,
3393,
77210,
1043,
2609,
33236,
77637,
1249,
1155,
353,
8840,
836,
8,
341,
49616,
1669,
1532,
77210,
1043,
2609,
33236,
741,
197,
322,
3393,
14540,
1249,
311,
4287,
198,
197,
3564,
77210,
1043,
2609,
33236,
1005,
12106,
1249,
27010,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestGenesisStateFromGenFile(t *testing.T) {
cdc := codec.NewLegacyAmino()
genFile := "../../../tests/fixtures/adr-024-coin-metadata_genesis.json"
genesisState, _, err := types.GenesisStateFromGenFile(genFile)
require.NoError(t, err)
var bankGenesis banktypes.GenesisState
cdc.MustUnmarshalJSON(genesisState[banktypes.ModuleName], &bankGenesis)
require.True(t, bankGenesis.Params.DefaultSendEnabled)
require.Equal(t, "1000nametoken,100000000stake", bankGenesis.Balances[0].GetCoins().String())
require.Equal(t, "cx106vrzv5xkheqhjm023pxcxlqmcjvuhtfyachz4", bankGenesis.Balances[0].GetAddress().String())
require.Equal(t, "The native staking token of the Chain Hub.", bankGenesis.DenomMetadata[0].GetDescription())
require.Equal(t, "uatom", bankGenesis.DenomMetadata[0].GetBase())
require.Equal(t, "matom", bankGenesis.DenomMetadata[0].GetDenomUnits()[1].GetDenom())
require.Equal(t, []string{"milliatom"}, bankGenesis.DenomMetadata[0].GetDenomUnits()[1].GetAliases())
require.Equal(t, uint32(3), bankGenesis.DenomMetadata[0].GetDenomUnits()[1].GetExponent())
} | explode_data.jsonl/76047 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 415
} | [
2830,
3393,
84652,
1397,
3830,
9967,
1703,
1155,
353,
8840,
836,
8,
341,
1444,
7628,
1669,
34647,
7121,
77415,
32,
31824,
2822,
82281,
1703,
1669,
30630,
23841,
94275,
14,
14666,
12,
15,
17,
19,
12,
7160,
96431,
16322,
13774,
4323,
698,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestValidateEdgeCases(t *testing.T) {
t.Parallel()
// A single import, but with brackets is ok.
imports := `import (
"os"
)`
testValidate(t, grouperGoimports{}, vopts{}, imports)
// Comments are allowed.
imports = `import (
// Comment on a line
"os" // End-of-line comment
/* Multi
line
comment */
"github.com/urfave/cli"
// Multi
// line,
// the other way.
"golang.org/x/net/context"
)`
testValidate(t, grouperGoimports{}, vopts{}, imports)
// Extra newlines are not allowed.
imports = `import (
"os"
"golang.org/x/net/context"
)`
testValidate(t, grouperGoimports{}, vopts{verrstr: errstrGroupExtraLine}, imports)
// Parse errors yield errors.
imports = `import (
"os
)`
testValidate(t, grouperGoimports{}, vopts{err: true}, imports)
// Special imports are allowed, sorted by actual import path.
imports = `import (
b "os"
a "strings"
_ "testing"
. "golang.org/x/net/context"
)`
testValidate(t, grouperGoimports{}, vopts{}, imports)
} | explode_data.jsonl/508 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 419
} | [
2830,
3393,
17926,
11656,
37302,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
197,
322,
362,
3175,
1159,
11,
714,
448,
38929,
374,
5394,
624,
21918,
82,
1669,
1565,
474,
2399,
197,
197,
34482,
698,
197,
49237,
18185,
17926,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestGetSecrets(t *testing.T) {
kubeClient := fakek8s.NewSimpleClientset()
secretClient := kubeinformers.NewSharedInformerFactory(kubeClient, 0).Core().V1().Secrets()
createSecret := func(secret *corev1.Secret) {
kubeClient.CoreV1().Secrets(secret.Namespace).Create(secret)
secretClient.Informer().GetIndexer().Add(secret)
}
cases := []struct {
name string
secret *corev1.Secret
ci *v1alpha1.ClusterIngress
expected map[string]*corev1.Secret
wantErr bool
}{{
name: "Get secrets successfully.",
secret: &testSecret,
ci: &ci,
expected: map[string]*corev1.Secret{
"knative-serving/secret0": &testSecret,
},
}, {
name: "Fail to get secrets",
secret: &corev1.Secret{},
ci: &v1alpha1.ClusterIngress{
Spec: v1alpha1.IngressSpec{
TLS: []v1alpha1.IngressTLS{{
Hosts: []string{"example.com"},
SecretName: "no-exist-secret",
SecretNamespace: "no-exist-namespace",
}},
},
},
wantErr: true,
}}
for _, c := range cases {
createSecret(c.secret)
t.Run(c.name, func(t *testing.T) {
secrets, err := GetSecrets(c.ci, secretClient.Lister())
if (err != nil) != c.wantErr {
t.Fatalf("Test: %s; GetSecrets error = %v, WantErr %v", c.name, err, c.wantErr)
}
if diff := cmp.Diff(c.expected, secrets); diff != "" {
t.Errorf("Unexpected secrets (-want, +got): %v", diff)
}
})
}
} | explode_data.jsonl/18883 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 624
} | [
2830,
3393,
1949,
19773,
82,
1155,
353,
8840,
836,
8,
341,
16463,
3760,
2959,
1669,
12418,
74,
23,
82,
7121,
16374,
2959,
746,
741,
197,
20474,
2959,
1669,
80958,
40440,
388,
7121,
16997,
641,
34527,
4153,
5969,
3760,
2959,
11,
220,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestSyncReplicaSetDormancy(t *testing.T) {
// Setup a test server so we can lie about the current state of pods
fakeHandler := utiltesting.FakeHandler{
StatusCode: 200,
ResponseBody: "{}",
SkipRequestFn: skipListerFunc,
T: t,
}
testServer := httptest.NewServer(&fakeHandler)
defer testServer.Close()
client := clientset.NewForConfigOrDie(&restclient.Config{Host: testServer.URL, ContentConfig: restclient.ContentConfig{GroupVersion: &schema.GroupVersion{Group: "", Version: "v1"}}})
fakePodControl := controller.FakePodControl{}
stopCh := make(chan struct{})
defer close(stopCh)
manager, informers := testNewReplicaSetControllerFromClient(client, stopCh, BurstReplicas)
manager.podControl = &fakePodControl
labelMap := map[string]string{"foo": "bar"}
rsSpec := newReplicaSet(2, labelMap)
informers.Apps().V1().ReplicaSets().Informer().GetIndexer().Add(rsSpec)
newPodList(informers.Core().V1().Pods().Informer().GetIndexer(), 1, v1.PodRunning, labelMap, rsSpec, "pod")
// Creates a replica and sets expectations
rsSpec.Status.Replicas = 1
rsSpec.Status.ReadyReplicas = 1
rsSpec.Status.AvailableReplicas = 1
manager.syncReplicaSet(GetKey(rsSpec, t))
validateSyncReplicaSet(t, &fakePodControl, 1, 0, 0)
// Expectations prevents replicas but not an update on status
rsSpec.Status.Replicas = 0
rsSpec.Status.ReadyReplicas = 0
rsSpec.Status.AvailableReplicas = 0
fakePodControl.Clear()
manager.syncReplicaSet(GetKey(rsSpec, t))
validateSyncReplicaSet(t, &fakePodControl, 0, 0, 0)
// Get the key for the controller
rsKey, err := controller.KeyFunc(rsSpec)
if err != nil {
t.Errorf("Couldn't get key for object %#v: %v", rsSpec, err)
}
// Lowering expectations should lead to a sync that creates a replica, however the
// fakePodControl error will prevent this, leaving expectations at 0, 0
manager.expectations.CreationObserved(rsKey)
rsSpec.Status.Replicas = 1
rsSpec.Status.ReadyReplicas = 1
rsSpec.Status.AvailableReplicas = 1
fakePodControl.Clear()
fakePodControl.Err = fmt.Errorf("Fake Error")
manager.syncReplicaSet(GetKey(rsSpec, t))
validateSyncReplicaSet(t, &fakePodControl, 1, 0, 0)
// This replica should not need a Lowering of expectations, since the previous create failed
fakePodControl.Clear()
fakePodControl.Err = nil
manager.syncReplicaSet(GetKey(rsSpec, t))
validateSyncReplicaSet(t, &fakePodControl, 1, 0, 0)
// 2 PUT for the ReplicaSet status during dormancy window.
// Note that the pod creates go through pod control so they're not recorded.
fakeHandler.ValidateRequestCount(t, 2)
} | explode_data.jsonl/7972 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 872
} | [
2830,
3393,
12154,
18327,
15317,
1649,
35,
493,
6572,
1155,
353,
8840,
836,
8,
341,
197,
322,
18626,
264,
1273,
3538,
773,
582,
646,
10246,
911,
279,
1482,
1584,
315,
54587,
198,
1166,
726,
3050,
1669,
4094,
8840,
991,
726,
3050,
515,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestGossipStateProvider_TestStateMessages(t *testing.T) {
t.Parallel()
bootPeer, bootPort := newBootNode(0, newCommitter(), noopPeerIdentityAcceptor)
defer bootPeer.shutdown()
peer := newPeerNode(1, newCommitter(), noopPeerIdentityAcceptor, bootPort)
defer peer.shutdown()
naiveStateMsgPredicate := func(message interface{}) bool {
return message.(proto.ReceivedMessage).GetGossipMessage().IsRemoteStateMessage()
}
_, bootCh := bootPeer.g.Accept(naiveStateMsgPredicate, true)
_, peerCh := peer.g.Accept(naiveStateMsgPredicate, true)
wg := sync.WaitGroup{}
wg.Add(2)
go func() {
msg := <-bootCh
t.Log("Bootstrap node got message, ", msg)
assert.True(t, msg.GetGossipMessage().GetStateRequest() != nil)
msg.Respond(&proto.GossipMessage{
Content: &proto.GossipMessage_StateResponse{StateResponse: &proto.RemoteStateResponse{Payloads: nil}},
})
wg.Done()
}()
go func() {
msg := <-peerCh
t.Log("Peer node got an answer, ", msg)
assert.True(t, msg.GetGossipMessage().GetStateResponse() != nil)
wg.Done()
}()
readyCh := make(chan struct{})
go func() {
wg.Wait()
readyCh <- struct{}{}
}()
chainID := common.ChainID(util.GetTestChainID())
waitUntilTrueOrTimeout(t, func() bool {
return len(peer.g.PeersOfChannel(chainID)) == 1
}, 30*time.Second)
t.Log("Sending gossip message with remote state request")
peer.g.Send(&proto.GossipMessage{
Content: &proto.GossipMessage_StateRequest{StateRequest: &proto.RemoteStateRequest{StartSeqNum: 0, EndSeqNum: 1}},
}, &comm.RemotePeer{Endpoint: peer.g.PeersOfChannel(chainID)[0].Endpoint, PKIID: peer.g.PeersOfChannel(chainID)[0].PKIid})
t.Log("Waiting until peers exchange messages")
select {
case <-readyCh:
{
t.Log("Done!!!")
}
case <-time.After(time.Duration(10) * time.Second):
{
t.Fail()
}
}
} | explode_data.jsonl/5645 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 701
} | [
2830,
3393,
38,
41473,
1397,
5179,
32541,
1397,
15820,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
197,
4619,
30888,
11,
10459,
7084,
1669,
501,
17919,
1955,
7,
15,
11,
501,
1092,
16126,
1507,
60829,
30888,
18558,
11654,
15349... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestRecursiveNext(t *testing.T) {
protest.AllowRecording(t)
testcases := []nextTest{
{6, 7},
{7, 10},
{10, 11},
{11, 17},
}
testseq("increment", contNext, testcases, "main.Increment", t)
withTestProcess("increment", t, func(p *proc.Target, fixture protest.Fixture) {
bp := setFunctionBreakpoint(p, t, "main.Increment")
assertNoError(p.Continue(), t, "Continue")
err := p.ClearBreakpoint(bp.Addr)
assertNoError(err, t, "ClearBreakpoint")
assertNoError(p.Next(), t, "Next 1")
assertNoError(p.Next(), t, "Next 2")
assertNoError(p.Next(), t, "Next 3")
frameoff0 := getFrameOff(p, t)
assertNoError(p.Step(), t, "Step")
frameoff1 := getFrameOff(p, t)
if frameoff0 == frameoff1 {
t.Fatalf("did not step into function?")
}
assertLineNumber(p, t, 6, "program did not continue to expected location,")
assertNoError(p.Next(), t, "Next 4")
assertLineNumber(p, t, 7, "program did not continue to expected location,")
assertNoError(p.StepOut(), t, "StepOut")
assertLineNumber(p, t, 11, "program did not continue to expected location,")
frameoff2 := getFrameOff(p, t)
if frameoff0 != frameoff2 {
t.Fatalf("frame offset mismatch %x != %x", frameoff0, frameoff2)
}
})
} | explode_data.jsonl/56287 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 479
} | [
2830,
3393,
78542,
5847,
1155,
353,
8840,
836,
8,
341,
197,
776,
1944,
29081,
52856,
1155,
340,
18185,
23910,
1669,
3056,
3600,
2271,
515,
197,
197,
90,
21,
11,
220,
22,
1583,
197,
197,
90,
22,
11,
220,
16,
15,
1583,
197,
197,
90,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestArgon2idHashSaltValidValues(t *testing.T) {
var err error
var hash string
datas := utils.SliceString(HashingPossibleSaltCharacters, 16)
for _, salt := range datas {
hash, err = HashPassword("password", salt, HashingAlgorithmArgon2id, 1, 8, 1, 32, 16)
assert.NoError(t, err)
assert.Equal(t, fmt.Sprintf("$argon2id$v=19$m=8,t=1,p=1$%s$", salt), hash[0:44])
}
} | explode_data.jsonl/40167 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 157
} | [
2830,
3393,
2735,
263,
17,
307,
6370,
47318,
4088,
6227,
1155,
353,
8840,
836,
8,
341,
2405,
1848,
1465,
271,
2405,
5175,
914,
271,
2698,
19346,
1669,
12439,
95495,
703,
7,
6370,
287,
65222,
47318,
37489,
11,
220,
16,
21,
692,
2023,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestWithClient(t *testing.T) {
is := is.New(t)
var calls int
testClient := &http.Client{
Transport: roundTripperFunc(func(req *http.Request) (*http.Response, error) {
calls++
resp := &http.Response{
Body: ioutil.NopCloser(strings.NewReader(`{"data":{"key":"value"}}`)),
}
return resp, nil
}),
}
ctx := context.Background()
client := NewClient("", WithHTTPClient(testClient), UseMultipartForm())
req := NewRequest(``)
client.Run(ctx, req, nil)
is.Equal(calls, 1) // calls
} | explode_data.jsonl/53420 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 202
} | [
2830,
3393,
2354,
2959,
1155,
353,
8840,
836,
8,
341,
19907,
1669,
374,
7121,
1155,
340,
2405,
6738,
526,
198,
18185,
2959,
1669,
609,
1254,
11716,
515,
197,
197,
27560,
25,
4778,
21884,
6922,
9626,
18552,
6881,
353,
1254,
9659,
8,
46... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestValidateHTTPMethod(t *testing.T) {
type testCases struct {
Value string
ErrCount int
}
invalidCases := []testCases{
{
Value: "incorrect",
ErrCount: 1,
},
{
Value: "delete",
ErrCount: 1,
},
}
for _, tc := range invalidCases {
_, errors := validateHTTPMethod(tc.Value, "http_method")
if len(errors) != tc.ErrCount {
t.Fatalf("Expected %q to trigger a validation error.", tc.Value)
}
}
validCases := []testCases{
{
Value: "ANY",
ErrCount: 0,
},
{
Value: "DELETE",
ErrCount: 0,
},
{
Value: "OPTIONS",
ErrCount: 0,
},
}
for _, tc := range validCases {
_, errors := validateHTTPMethod(tc.Value, "http_method")
if len(errors) != tc.ErrCount {
t.Fatalf("Expected %q not to trigger a validation error.", tc.Value)
}
}
} | explode_data.jsonl/78567 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 390
} | [
2830,
3393,
17926,
9230,
3523,
1155,
353,
8840,
836,
8,
341,
13158,
1273,
37302,
2036,
341,
197,
47399,
262,
914,
198,
197,
197,
7747,
2507,
526,
198,
197,
630,
197,
11808,
37302,
1669,
3056,
1944,
37302,
515,
197,
197,
515,
298,
4739... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestPolylineSmoothen(t *testing.T) {
test.T(t, (&Polyline{}).Smoothen(), MustParseSVG(""))
test.T(t, (&Polyline{}).Add(0, 0).Add(10, 0).Smoothen(), MustParseSVG("M0 0L10 0"))
test.T(t, (&Polyline{}).Add(0, 0).Add(5, 10).Add(10, 0).Add(5, -10).Smoothen(), MustParseSVG("M0 0C1.444444 5.111111 2.888889 10.22222 5 10C7.111111 9.777778 9.888889 4.222222 10 0C10.11111 -4.222222 7.555556 -7.111111 5 -10"))
test.T(t, (&Polyline{}).Add(0, 0).Add(5, 10).Add(10, 0).Add(5, -10).Add(0, 0).Smoothen(), MustParseSVG("M0 0C0 5 2.5 10 5 10C7.5 10 10 5 10 0C10 -5 7.5 -10 5 -10C2.5 -10 0 -5 0 0z"))
} | explode_data.jsonl/42816 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 306
} | [
2830,
3393,
38164,
1056,
10673,
2624,
3391,
1155,
353,
8840,
836,
8,
341,
18185,
836,
1155,
11,
15899,
38164,
1056,
6257,
568,
10673,
2624,
3391,
1507,
15465,
14463,
64397,
73303,
18185,
836,
1155,
11,
15899,
38164,
1056,
6257,
568,
2212,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestSwiftTestCase(t *testing.T) {
reader := strings.NewReader(`import XCTest
import SwiftFonts
class FontSorterTests: XCTestCase {
let sorter = FontSorter()
func testCompareHyphenWithNoHyphen() {
let fonts = ["Arial-ItalicMT", "ArialMT"]
let expected = ["ArialMT", "Arial-ItalicMT"]
let sorted = sorter.sortFontNames(fonts)
XCTAssertEqual(expected[0], sorted[0], "the array should be sorted properly")
XCTAssertEqual(expected[1], sorted[1], "the array should be sorted properly")
}
func testCompareHyphenWithHyphen() {
let fonts = ["Avenir-Roman", "Avenir-Oblique"]
let expected = ["Avenir-Oblique", "Avenir-Roman"]
let sorted = sorter.sortFontNames(fonts)
XCTAssertEqual(expected[0], sorted[0], "when two fonts contain a hyphen, they should be sorted alphabetically")
XCTAssertEqual(expected[1], sorted[1], "when two fonts contain a hyphen, they should be sorted alphabetically")
}
}
`)
result, err := dialect.Examine("Swift", "foo.swift", reader, nil)
if err != nil {
t.Fatal(err)
}
if result == nil {
t.Fatal("result was nil")
}
if result.Loc != 24 {
t.Fatalf("result.Loc should have been 24, was %d", result.Loc)
}
if result.Sloc != 19 {
t.Fatalf("result.Sloc should have been 19, was %d", result.Sloc)
}
if result.Comments != 0 {
t.Fatalf("result.Comments should have been 0, was %d", result.Comments)
}
if result.Blanks != 5 {
t.Fatalf("result.Blanks should have been 5, was %d", result.Blanks)
}
if result.IsTest == false {
t.Fatal("result.IsTest should have been true, was false")
}
} | explode_data.jsonl/44409 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 639
} | [
2830,
3393,
55336,
16458,
1155,
353,
8840,
836,
8,
341,
61477,
1669,
9069,
68587,
5809,
474,
41240,
198,
474,
23670,
55369,
271,
1040,
9562,
10231,
261,
18200,
25,
62810,
1476,
262,
1077,
90452,
284,
9562,
10231,
261,
2822,
262,
2915,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 8 |
func TestRPC_Relay(t *testing.T) {
if testing.Short() {
t.Skip("skipping in short mode")
}
codec.UpgradeHeight = 7000
kb := getInMemoryKeybase()
genBZ, _, validators, app := fiveValidatorsOneAppGenesis()
_, _, cleanup := NewInMemoryTendermintNode(t, genBZ)
// setup relay endpoint
expectedRequest := `"jsonrpc":"2.0","method":"web3_sha3","params":["0x68656c6c6f20776f726c64"],"id":64`
expectedResponse := "0x47173285a8d7341e5e972fc677286384f802f8ef42a5ec5f03bbfa254cb01fad"
gock.New(dummyChainsURL).
Post("").
BodyString(expectedRequest).
Reply(200).
BodyString(expectedResponse)
appPrivateKey, err := kb.ExportPrivateKeyObject(app.Address, "test")
assert.Nil(t, err)
// setup AAT
aat := pocketTypes.AAT{
Version: "0.0.1",
ApplicationPublicKey: appPrivateKey.PublicKey().RawString(),
ClientPublicKey: appPrivateKey.PublicKey().RawString(),
ApplicationSignature: "",
}
sig, err := appPrivateKey.Sign(aat.Hash())
if err != nil {
panic(err)
}
aat.ApplicationSignature = hex.EncodeToString(sig)
payload := pocketTypes.Payload{
Data: expectedRequest,
Method: "POST",
}
// setup relay
relay := pocketTypes.Relay{
Payload: payload,
Meta: pocketTypes.RelayMeta{BlockHeight: 5}, // todo race condition here
Proof: pocketTypes.RelayProof{
Entropy: 32598345349034509,
SessionBlockHeight: 1,
ServicerPubKey: validators[0].PublicKey.RawString(),
Blockchain: dummyChainsHash,
Token: aat,
Signature: "",
},
}
relay.Proof.RequestHash = relay.RequestHashString()
sig, err = appPrivateKey.Sign(relay.Proof.Hash())
if err != nil {
panic(err)
}
relay.Proof.Signature = hex.EncodeToString(sig)
relay2 := pocketTypes.Relay{
Payload: payload,
Meta: pocketTypes.RelayMeta{BlockHeight: 5}, // todo race condition here
Proof: pocketTypes.RelayProof{
Entropy: 32598345349034519,
SessionBlockHeight: 1,
ServicerPubKey: validators[0].PublicKey.RawString(),
Blockchain: dummyChainsHash,
Token: aat,
Signature: "",
},
}
relay2.Proof.RequestHash = relay2.RequestHashString()
sig2, err := appPrivateKey.Sign(relay2.Proof.Hash())
if err != nil {
panic(err)
}
relay2.Proof.Signature = hex.EncodeToString(sig2)
// setup the query
_, stopCli, evtChan := subscribeTo(t, tmTypes.EventNewBlock)
<-evtChan // Wait for block
q := newClientRequest("relay", newBody(relay))
rec := httptest.NewRecorder()
Relay(rec, q, httprouter.Params{})
resp := getJSONResponse(rec)
var response RPCRelayResponse
err = json.Unmarshal(resp, &response)
assert.Nil(t, err)
assert.Equal(t, expectedResponse, response.Response)
gock.Off()
<-evtChan // Wait for block
gock.New(dummyChainsURL).
Post("").
BodyString(expectedRequest).
Reply(200).
BodyString(expectedResponse)
q2 := newClientRequest("relay", newBody(relay2))
rec2 := httptest.NewRecorder()
Relay(rec2, q2, httprouter.Params{})
resp = getJSONResponse(rec2)
var response2 RPCRelayResponse
err = json.Unmarshal(resp, &response2)
assert.Nil(t, err)
assert.Equal(t, expectedResponse, response2.Response)
gock.Off()
cleanup()
stopCli()
} | explode_data.jsonl/44729 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1327
} | [
2830,
3393,
29528,
2568,
6895,
1155,
353,
8840,
836,
8,
341,
743,
7497,
55958,
368,
341,
197,
3244,
57776,
445,
4886,
5654,
304,
2805,
3856,
1138,
197,
532,
43343,
66,
13,
43861,
3640,
284,
220,
22,
15,
15,
15,
271,
16463,
65,
1669,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestMulUnbalanced(t *testing.T) {
defer runtime.GOMAXPROCS(runtime.GOMAXPROCS(1))
x := rndNat(50000)
y := rndNat(40)
allocSize := allocBytes(func() {
nat(nil).mul(x, y)
})
inputSize := uint64(len(x)+len(y)) * _S
if ratio := allocSize / uint64(inputSize); ratio > 10 {
t.Errorf("multiplication uses too much memory (%d > %d times the size of inputs)", allocSize, ratio)
}
} | explode_data.jsonl/2184 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 159
} | [
2830,
3393,
59155,
1806,
58402,
1155,
353,
8840,
836,
8,
341,
16867,
15592,
1224,
1898,
2954,
9117,
6412,
89467,
1224,
1898,
2954,
9117,
6412,
7,
16,
1171,
10225,
1669,
37193,
65214,
7,
20,
15,
15,
15,
15,
340,
14522,
1669,
37193,
652... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestInstallHgHooksQuoted(t *testing.T) {
assert := assert.New(t)
repo, client := helpers.CreateHgRepo(t, "hg-repo with a space")
defer helpers.CleanupHgRepo(t, client)
repo.InstallHooks("/tmp/config with a space.json", false)
hgrc, err := ini.Load(filepath.Join(repo.Path, ".hg", "hgrc"))
assert.Nil(err)
exePath, err := filepath.Abs(os.Args[0])
assert.Nil(err)
assert.Equal(
fmt.Sprintf("%s --config '/tmp/config with a space.json' trigger-webhooks 'hg-repo with a space' push", exePath),
hgrc.Section("hooks").Key("changegroup.rbgateway").String(),
)
} | explode_data.jsonl/57195 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 239
} | [
2830,
3393,
24690,
39,
70,
67769,
2183,
9253,
1155,
353,
8840,
836,
8,
341,
6948,
1669,
2060,
7121,
1155,
692,
17200,
5368,
11,
2943,
1669,
30187,
7251,
39,
70,
25243,
1155,
11,
330,
66602,
5504,
5368,
448,
264,
3550,
1138,
16867,
301... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestValidateCreate(t *testing.T) {
defer featuregatetesting.SetFeatureGateDuringTest(t, utilfeature.DefaultFeatureGate, features.InPlacePodVerticalScaling, true)()
namespace := "test"
handler := NewPodResourceAllocation()
pod := api.Pod{
ObjectMeta: metav1.ObjectMeta{Name: "pod1", Namespace: namespace},
Spec: api.PodSpec{
Containers: []api.Container{
{
Name: "c1",
Image: "image",
},
},
},
}
resources1 := api.ResourceList{
api.ResourceCPU: resource.MustParse("1"),
api.ResourceMemory: resource.MustParse("1Gi"),
}
resources2 := api.ResourceList{
api.ResourceCPU: resource.MustParse("2"),
api.ResourceMemory: resource.MustParse("2Gi"),
}
tests := []struct {
name string
resources api.ResourceRequirements
resourcesAllocated api.ResourceList
expectError bool
}{
{
name: "create new pod - resource allocation not set",
resources: api.ResourceRequirements{Requests: resources1, Limits: resources1},
resourcesAllocated: nil,
expectError: true,
},
{
name: "create new pod - resource allocation equals desired resources",
resources: api.ResourceRequirements{Requests: resources1, Limits: resources1},
resourcesAllocated: resources1,
expectError: false,
},
{
name: "create new pod - resource allocation exceeds desired resources",
resources: api.ResourceRequirements{Requests: resources1, Limits: resources1},
resourcesAllocated: resources2,
expectError: true,
},
//TODO: more unit tests and negative tests
//TODO: Add multi-container tests. Add tests for adding new container via patch
}
for _, tc := range tests {
pod.Spec.Containers[0].Resources = tc.resources
pod.Spec.Containers[0].ResourcesAllocated = tc.resourcesAllocated
err := handler.Validate(admission.NewAttributesRecord(&pod, nil, api.Kind("Pod").WithVersion("version"),
pod.Tenant, pod.Namespace, pod.Name, api.Resource("pods").WithVersion("version"), "",
admission.Create, nil, false, nil), nil)
if tc.expectError && err == nil {
t.Fatal(fmt.Sprintf("Test: %s - missing expected error", tc.name))
}
if !tc.expectError && err != nil {
t.Fatal(fmt.Sprintf("Test: %s - received unexpected error %+v", tc.name, err))
}
}
} | explode_data.jsonl/16636 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 901
} | [
2830,
3393,
17926,
4021,
1155,
353,
8840,
836,
8,
341,
16867,
4565,
70,
266,
57824,
287,
4202,
13859,
42318,
16014,
2271,
1155,
11,
4094,
12753,
13275,
13859,
42318,
11,
4419,
5337,
17371,
23527,
18612,
59684,
11,
830,
8,
741,
56623,
16... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
func TestStoreBySafeIndex(t *testing.T) {
st, closer := makeStore()
defer closer()
_ , err := st.Set(schema.KeyValue{Key: []byte(`myFirstElementKey`), Value: []byte(`firstValue`)})
assert.NoError(t, err)
_ , err = st.Set(schema.KeyValue{Key: []byte(`mySecondElementKey`), Value: []byte(`secondValue`)})
assert.NoError(t, err)
_ , err = st.Set(schema.KeyValue{Key: []byte(`myThirdElementKey`), Value: []byte(`thirdValue`)})
assert.NoError(t, err)
sio1 := schema.SafeIndexOptions{
Index: uint64(1),
}
st.tree.WaitUntil(2)
safeItem, err := st.BySafeIndex(sio1)
assert.NoError(t, err)
assert.NotNil(t, safeItem)
assert.Equal(t, []byte(`mySecondElementKey`), safeItem.Item.Key)
assert.Equal(t, []byte(`secondValue`), safeItem.Item.Value)
assert.Equal(t, uint64(1), safeItem.Item.Index)
assert.True(t, safeItem.Proof.Verify(
safeItem.Item.Hash(),
schema.Root{}, // zerovalue signals no prev root
))
// second item with prev root
prevRoot := safeItem.Proof.NewRoot()
sio2 := schema.SafeIndexOptions{
Index: uint64(2),
RootIndex: &schema.Index{
Index: prevRoot.Index,
},
}
safeItem2, err := st.BySafeIndex(sio2)
assert.NoError(t, err)
assert.NotNil(t, safeItem2)
assert.Equal(t, []byte(`myThirdElementKey`), safeItem2.Item.Key)
assert.Equal(t, []byte(`thirdValue`), safeItem2.Item.Value)
assert.Equal(t, uint64(2), safeItem2.Item.Index)
assert.True(t, safeItem2.Proof.Verify(
safeItem2.Item.Hash(),
*prevRoot,
))
lastRoot, err := st.CurrentRoot()
assert.NoError(t, err)
assert.NotNil(t, lastRoot)
assert.Equal(t, *lastRoot, *safeItem2.Proof.NewRoot())
} | explode_data.jsonl/77973 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 663
} | [
2830,
3393,
6093,
1359,
25663,
1552,
1155,
353,
8840,
836,
8,
341,
18388,
11,
12128,
1669,
1281,
6093,
741,
16867,
12128,
2822,
197,
62,
1154,
1848,
1669,
357,
4202,
42735,
9610,
1130,
90,
1592,
25,
3056,
3782,
5809,
2408,
5338,
1691,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestGetEmoji(t *testing.T) {
th := Setup().InitBasic()
defer th.TearDown()
Client := th.Client
EnableCustomEmoji := *th.App.Config().ServiceSettings.EnableCustomEmoji
defer func() {
th.App.UpdateConfig(func(cfg *model.Config) { *cfg.ServiceSettings.EnableCustomEmoji = EnableCustomEmoji })
}()
th.App.UpdateConfig(func(cfg *model.Config) { *cfg.ServiceSettings.EnableCustomEmoji = true })
emoji := &model.Emoji{
CreatorId: th.BasicUser.Id,
Name: model.NewId(),
}
newEmoji, resp := Client.CreateEmoji(emoji, utils.CreateTestGif(t, 10, 10), "image.gif")
CheckNoError(t, resp)
emoji, resp = Client.GetEmoji(newEmoji.Id)
CheckNoError(t, resp)
if emoji.Id != newEmoji.Id {
t.Fatal("wrong emoji was returned")
}
_, resp = Client.GetEmoji(model.NewId())
CheckNotFoundStatus(t, resp)
} | explode_data.jsonl/76084 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 327
} | [
2830,
3393,
1949,
92731,
1155,
353,
8840,
836,
8,
341,
70479,
1669,
18626,
1005,
3803,
15944,
741,
16867,
270,
836,
682,
4454,
741,
71724,
1669,
270,
11716,
271,
197,
11084,
10268,
92731,
1669,
353,
339,
5105,
10753,
1005,
1860,
6086,
3... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestZonesListAllIntegrationFilterForNonexistentName(t *testing.T) {
c := client()
zones, err := c.ZonesListAll(ListFilter{
NameFilter: "foo",
})
if err != nil {
t.Error(err)
}
if len(zones) > 0 {
t.Error("Expected ZonesListAll for zones named 'foo' to yield no results")
}
} | explode_data.jsonl/12118 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 116
} | [
2830,
3393,
57,
3154,
852,
2403,
52464,
5632,
2461,
8121,
64085,
675,
1155,
353,
8840,
836,
8,
341,
1444,
1669,
2943,
741,
20832,
3154,
11,
1848,
1669,
272,
13476,
3154,
852,
2403,
10278,
5632,
515,
197,
21297,
5632,
25,
330,
7975,
75... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
// TestAttributes_UpdateFromAttribute verifies the UPDATE action when the new
// value is copied from another attribute (FromAttribute) instead of being
// given literally: the target key "boo" is only rewritten when both it and
// the source key "foo" are already present on the span.
func TestAttributes_UpdateFromAttribute(t *testing.T) {
	testCases := []testCase{
		// Ensure no changes to the span as there is no attributes map.
		{
			name: "UpdateNoAttributes",
			inputAttributes: map[string]pdata.AttributeValue{},
			expectedAttributes: map[string]pdata.AttributeValue{},
		},
		// Ensure the attribute `boo` isn't updated because attribute `foo` isn't present in the span.
		{
			name: "UpdateKeyNoExistFromAttribute",
			inputAttributes: map[string]pdata.AttributeValue{
				"boo": pdata.NewAttributeValueString("bob"),
			},
			expectedAttributes: map[string]pdata.AttributeValue{
				"boo": pdata.NewAttributeValueString("bob"),
			},
		},
		// Ensure no updates as the target key `boo` doesn't exists.
		{
			name: "UpdateKeyNoExistMainAttributed",
			inputAttributes: map[string]pdata.AttributeValue{
				"foo": pdata.NewAttributeValueString("over there"),
			},
			expectedAttributes: map[string]pdata.AttributeValue{
				"foo": pdata.NewAttributeValueString("over there"),
			},
		},
		// Ensure no updates as the target key `boo` doesn't exists.
		{
			name: "UpdateKeyFromExistingAttribute",
			inputAttributes: map[string]pdata.AttributeValue{
				"foo": pdata.NewAttributeValueString("there is a party over here"),
				"boo": pdata.NewAttributeValueString("not here"),
			},
			expectedAttributes: map[string]pdata.AttributeValue{
				"foo": pdata.NewAttributeValueString("there is a party over here"),
				"boo": pdata.NewAttributeValueString("there is a party over here"),
			},
		},
	}
	// Single UPDATE action: overwrite "boo" with the current value of "foo".
	cfg := &Settings{
		Actions: []ActionKeyValue{
			{Key: "boo", Action: UPDATE, FromAttribute: "foo"},
		},
	}
	ap, err := NewAttrProc(cfg)
	require.Nil(t, err)
	require.NotNil(t, ap)
	for _, tt := range testCases {
		runIndividualTestCase(t, tt, ap)
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 666
} | [
2830,
3393,
10516,
47393,
3830,
3907,
1155,
353,
8840,
836,
8,
1476,
18185,
37302,
1669,
3056,
66194,
515,
197,
197,
322,
29279,
902,
4344,
311,
279,
9390,
438,
1052,
374,
902,
8201,
2415,
624,
197,
197,
515,
298,
11609,
25,
2290,
330... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestJetStreamManagement_GetMsg(t *testing.T) {
t.Run("1-node", func(t *testing.T) {
withJSServer(t, testJetStreamManagement_GetMsg)
})
t.Run("3-node", func(t *testing.T) {
withJSCluster(t, "GET", 3, testJetStreamManagement_GetMsg)
})
} | explode_data.jsonl/29163 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 103
} | [
2830,
3393,
35641,
3027,
22237,
13614,
6611,
1155,
353,
8840,
836,
8,
341,
3244,
16708,
445,
16,
39054,
497,
2915,
1155,
353,
8840,
836,
8,
341,
197,
46948,
41,
1220,
2836,
1155,
11,
1273,
35641,
3027,
22237,
13614,
6611,
340,
197,
35... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestFilterExtractFromDNF builds logical plans for queries whose WHERE
// clause is in disjunctive normal form and checks that common conjuncts are
// factored out by expression.ExtractFiltersFromDNFs. The expected strings
// are the canonical printed forms of the resulting filter lists.
func TestFilterExtractFromDNF(t *testing.T) {
	store, clean := testkit.CreateMockStore(t)
	defer clean()
	tk := testkit.NewTestKit(t, store)
	tk.MustExec("use test")
	tk.MustExec("drop table if exists t")
	tk.MustExec("create table t(a int, b int, c int)")
	tests := []struct {
		exprStr string
		result string
	}{
		{
			exprStr: "a = 1 or a = 1 or a = 1",
			result: "[eq(test.t.a, 1)]",
		},
		{
			exprStr: "a = 1 or a = 1 or (a = 1 and b = 1)",
			result: "[eq(test.t.a, 1)]",
		},
		{
			exprStr: "(a = 1 and a = 1) or a = 1 or b = 1",
			result: "[or(or(and(eq(test.t.a, 1), eq(test.t.a, 1)), eq(test.t.a, 1)), eq(test.t.b, 1))]",
		},
		{
			exprStr: "(a = 1 and b = 2) or (a = 1 and b = 3) or (a = 1 and b = 4)",
			result: "[eq(test.t.a, 1) or(eq(test.t.b, 2), or(eq(test.t.b, 3), eq(test.t.b, 4)))]",
		},
		{
			exprStr: "(a = 1 and b = 1 and c = 1) or (a = 1 and b = 1) or (a = 1 and b = 1 and c > 2 and c < 3)",
			result: "[eq(test.t.a, 1) eq(test.t.b, 1)]",
		},
	}
	ctx := context.Background()
	for _, tt := range tests {
		sql := "select * from t where " + tt.exprStr
		sctx := tk.Session().(sessionctx.Context)
		sc := sctx.GetSessionVars().StmtCtx
		stmts, err := session.Parse(sctx, sql)
		require.NoError(t, err, "error %v, for expr %s", err, tt.exprStr)
		require.Len(t, stmts, 1)
		ret := &plannercore.PreprocessorReturn{}
		err = plannercore.Preprocess(sctx, stmts[0], plannercore.WithPreprocessorReturn(ret))
		require.NoError(t, err, "error %v, for resolve name, expr %s", err, tt.exprStr)
		p, _, err := plannercore.BuildLogicalPlanForTest(ctx, sctx, stmts[0], ret.InfoSchema)
		require.NoError(t, err, "error %v, for build plan, expr %s", err, tt.exprStr)
		// The plan root's first child is the Selection node that holds the
		// WHERE conditions produced for this query.
		selection := p.(plannercore.LogicalPlan).Children()[0].(*plannercore.LogicalSelection)
		conds := make([]expression.Expression, len(selection.Conditions))
		for i, cond := range selection.Conditions {
			conds[i] = expression.PushDownNot(sctx, cond)
		}
		afterFunc := expression.ExtractFiltersFromDNFs(sctx, conds)
		// Sort by hash code so the printed result is deterministic.
		sort.Slice(afterFunc, func(i, j int) bool {
			return bytes.Compare(afterFunc[i].HashCode(sc), afterFunc[j].HashCode(sc)) < 0
		})
		require.Equal(t, fmt.Sprintf("%s", afterFunc), tt.result, "wrong result for expr: %s", tt.exprStr)
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 994
} | [
2830,
3393,
5632,
28959,
3830,
31264,
37,
1155,
353,
8840,
836,
8,
341,
57279,
11,
4240,
1669,
1273,
8226,
7251,
11571,
6093,
1155,
340,
16867,
4240,
2822,
3244,
74,
1669,
1273,
8226,
7121,
2271,
7695,
1155,
11,
3553,
340,
3244,
74,
5... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestContainsElement(t *testing.T) {
s := NewSet()
e := "dummy"
f := "empty"
s.Add(e)
if !s.Contains(e) || s.Contains(f) {
t.Fail()
}
s.Add(f)
if !s.Contains(e) || !s.Contains(f) {
t.Fail()
}
} | explode_data.jsonl/16577 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 114
} | [
2830,
3393,
23805,
1691,
1155,
353,
8840,
836,
8,
341,
1903,
1669,
1532,
1649,
741,
7727,
1669,
330,
31390,
698,
1166,
1669,
330,
3194,
1837,
1903,
1904,
2026,
340,
743,
753,
82,
11545,
2026,
8,
1369,
274,
11545,
955,
8,
341,
197,
3... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestDownloader(t *testing.T) {
if testing.Short() {
return
}
file, err := Download(log.NewNopLogger(), "")
if err != nil {
t.Fatal(err)
}
if file == "" {
t.Fatal("no DPL file")
}
defer os.RemoveAll(filepath.Dir(file))
if !strings.EqualFold("dpl.txt", filepath.Base(file)) {
t.Errorf("unknown file %s", file)
}
} | explode_data.jsonl/68074 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 148
} | [
2830,
3393,
92698,
1155,
353,
8840,
836,
8,
341,
743,
7497,
55958,
368,
341,
197,
853,
198,
197,
630,
17661,
11,
1848,
1669,
8577,
12531,
7121,
45,
453,
7395,
1507,
14676,
743,
1848,
961,
2092,
341,
197,
3244,
26133,
3964,
340,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestCreate(t *testing.T) {
storage, _, fakeClient := newStorage(t)
test := registrytest.New(t, fakeClient, storage.Etcd)
ds := newValidDaemonSet()
ds.ObjectMeta = api.ObjectMeta{}
test.TestCreate(
// valid
ds,
// invalid (invalid selector)
&experimental.DaemonSet{
Spec: experimental.DaemonSetSpec{
Selector: map[string]string{},
Template: validDaemonSet.Spec.Template,
},
},
)
} | explode_data.jsonl/56656 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 164
} | [
2830,
3393,
4021,
1155,
353,
8840,
836,
8,
341,
197,
16172,
11,
8358,
12418,
2959,
1669,
501,
5793,
1155,
340,
18185,
1669,
19424,
1944,
7121,
1155,
11,
12418,
2959,
11,
5819,
5142,
83,
4385,
340,
83336,
1669,
501,
4088,
89177,
1649,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestCheckSMSGlobalConfigValues(t *testing.T) {
t.Parallel()
checkSMSGlobalConfigValues := GetConfig()
err := checkSMSGlobalConfigValues.LoadConfig(ConfigTestFile)
if err != nil {
t.Errorf("Test failed. checkSMSGlobalConfigValues.LoadConfig: %s", err)
}
err = checkSMSGlobalConfigValues.CheckSMSGlobalConfigValues()
if err != nil {
t.Error(
`Test failed. checkSMSGlobalConfigValues.CheckSMSGlobalConfigValues: Incorrect Return Value`,
)
}
checkSMSGlobalConfigValues.SMS.Username = "Username"
err = checkSMSGlobalConfigValues.CheckSMSGlobalConfigValues()
if err == nil {
t.Error(
"Test failed. checkSMSGlobalConfigValues.CheckSMSGlobalConfigValues: Incorrect Return Value",
)
}
checkSMSGlobalConfigValues.SMS.Username = "1234"
checkSMSGlobalConfigValues.SMS.Contacts[0].Name = "Bob"
checkSMSGlobalConfigValues.SMS.Contacts[0].Number = "12345"
err = checkSMSGlobalConfigValues.CheckSMSGlobalConfigValues()
if err == nil {
t.Error(
"Test failed. checkSMSGlobalConfigValues.CheckSMSGlobalConfigValues: Incorrect Return Value",
)
}
checkSMSGlobalConfigValues.SMS.Contacts = checkSMSGlobalConfigValues.SMS.Contacts[:0]
err = checkSMSGlobalConfigValues.CheckSMSGlobalConfigValues()
if err == nil {
t.Error(
"Test failed. checkSMSGlobalConfigValues.CheckSMSGlobalConfigValues: Incorrect Return Value",
)
}
} | explode_data.jsonl/21899 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 469
} | [
2830,
3393,
3973,
65565,
11646,
2648,
6227,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
25157,
65565,
11646,
2648,
6227,
1669,
2126,
2648,
741,
9859,
1669,
1779,
65565,
11646,
2648,
6227,
13969,
2648,
33687,
2271,
1703,
340,
74... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
// TestStoreOrder is a regression test for the storeOrder scheduling helper.
// In the hand-built SSA block below, v2 depends on v3 and v4, v4 depends on
// v3, and v3 (a load) depends on the store v5, so all of v2..v4 must be
// ordered after v5 in the result.
func TestStoreOrder(t *testing.T) {
	// In the function below, v2 depends on v3 and v4, v4 depends on v3, and v3 depends on store v5.
	// storeOrder did not handle this case correctly.
	c := testConfig(t)
	fun := c.Fun("entry",
		Bloc("entry",
			Valu("mem0", OpInitMem, types.TypeMem, 0, nil),
			Valu("a", OpAdd64, c.config.Types.Int64, 0, nil, "b", "c"), // v2
			Valu("b", OpLoad, c.config.Types.Int64, 0, nil, "ptr", "mem1"), // v3
			Valu("c", OpNeg64, c.config.Types.Int64, 0, nil, "b"), // v4
			Valu("mem1", OpStore, types.TypeMem, 0, c.config.Types.Int64, "ptr", "v", "mem0"), // v5
			Valu("mem2", OpStore, types.TypeMem, 0, c.config.Types.Int64, "ptr", "a", "mem1"),
			Valu("ptr", OpConst64, c.config.Types.Int64, 0xABCD, nil),
			Valu("v", OpConst64, c.config.Types.Int64, 12, nil),
			Goto("exit")),
		Bloc("exit",
			Exit("mem2")))
	CheckFunc(fun.f)
	order := storeOrder(fun.f.Blocks[0].Values, fun.f.newSparseSet(fun.f.NumValues()), make([]int32, fun.f.NumValues()))
	// check that v2, v3, v4 is sorted after v5
	var ai, bi, ci, si int
	for i, v := range order {
		switch v.ID {
		case 2:
			ai = i
		case 3:
			bi = i
		case 4:
			ci = i
		case 5:
			si = i
		}
	}
	if ai < si || bi < si || ci < si {
		t.Logf("Func: %s", fun.f)
		t.Errorf("store order is wrong: got %v, want v2 v3 v4 after v5", order)
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 680
} | [
2830,
3393,
6093,
4431,
1155,
353,
8840,
836,
8,
341,
197,
322,
758,
279,
729,
3685,
11,
348,
17,
13798,
389,
348,
18,
323,
348,
19,
11,
348,
19,
13798,
389,
348,
18,
11,
323,
348,
18,
13798,
389,
3553,
348,
20,
624,
197,
322,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 9 |
func TestFromError(t *testing.T) {
t.Run("input general error", func(t *testing.T) {
err := errors.New("general error")
gst := FromError(err)
assert.Equal(t, codes.Unknown, gst.Code())
assert.Contains(t, gst.Message(), "general")
})
t.Run("input wrap error", func(t *testing.T) {
err := pkgerr.Wrap(ecode.RequestErr, "hh")
gst := FromError(err)
assert.Equal(t, "-400", gst.Message())
})
t.Run("input ecode.Code", func(t *testing.T) {
err := ecode.RequestErr
gst := FromError(err)
//assert.Equal(t, codes.InvalidArgument, gst.Code())
// NOTE: set all grpc.status as Unkown when error is ecode.Codes for compatible
assert.Equal(t, codes.Unknown, gst.Code())
// NOTE: gst.Message == str(ecode.Code) for compatible php leagcy code
assert.Equal(t, err.Message(), gst.Message())
})
t.Run("input raw Canceled", func(t *testing.T) {
gst := FromError(context.Canceled)
assert.Equal(t, codes.Unknown, gst.Code())
assert.Equal(t, "-498", gst.Message())
})
t.Run("input raw DeadlineExceeded", func(t *testing.T) {
gst := FromError(context.DeadlineExceeded)
assert.Equal(t, codes.Unknown, gst.Code())
assert.Equal(t, "-504", gst.Message())
})
t.Run("input ecode.Status", func(t *testing.T) {
m := ×tamp.Timestamp{Seconds: time.Now().Unix()}
err, _ := ecode.Error(ecode.Unauthorized, "unauthorized").WithDetails(m)
gst := FromError(err)
//assert.Equal(t, codes.Unauthenticated, gst.Code())
// NOTE: set all grpc.status as Unkown when error is ecode.Codes for compatible
assert.Equal(t, codes.Unknown, gst.Code())
assert.Len(t, gst.Details(), 1)
details := gst.Details()
assert.IsType(t, err.Proto(), details[0])
})
} | explode_data.jsonl/58602 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 680
} | [
2830,
3393,
3830,
1454,
1155,
353,
8840,
836,
8,
341,
3244,
16708,
445,
1355,
4586,
1465,
497,
2915,
1155,
353,
8840,
836,
8,
341,
197,
9859,
1669,
5975,
7121,
445,
24595,
1465,
1138,
197,
3174,
267,
1669,
5542,
1454,
3964,
692,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestIsContainerEntry(t *testing.T) {
source := config.NewLogSource("", &config.LogsConfig{})
tailer := NewTailer(source, nil)
var entry *sdjournal.JournalEntry
entry = &sdjournal.JournalEntry{
Fields: map[string]string{
containerIDKey: "0123456789",
},
}
assert.True(t, tailer.isContainerEntry(entry))
entry = &sdjournal.JournalEntry{}
assert.False(t, tailer.isContainerEntry(entry))
} | explode_data.jsonl/26854 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 149
} | [
2830,
3393,
3872,
4502,
5874,
1155,
353,
8840,
836,
8,
341,
47418,
1669,
2193,
7121,
2201,
3608,
19814,
609,
1676,
5247,
82,
2648,
37790,
3244,
38782,
1669,
1532,
51,
38782,
12437,
11,
2092,
692,
2405,
4343,
353,
13446,
42518,
3503,
493... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestPullRequestDecorator_CommentQualityGate_CheckErrorOnSendRequest checks
// that an error emitted on the connection's error channel while posting the
// quality-gate comment is returned verbatim by CommentQualityGate.
func TestPullRequestDecorator_CommentQualityGate_CheckErrorOnSendRequest(t *testing.T) {
	wantError := errors.New("failure")
	// Connection stub whose Post immediately reports wantError on the
	// (buffered, then closed) error channel.
	mockConn := &mocks.MockConnection{
		PostMock: func(endpoint string, content []byte, contentType string) (<-chan []byte, <-chan error) {
			chOut := make(chan []byte, 1)
			chErr := make(chan error, 1)
			defer close(chOut)
			defer close(chErr)
			chErr <- wantError
			return chOut, chErr
		},
	}
	// Engine stub returning canned template/report content.
	mockEngine := &mocks.MockEngine{
		GetQualityReportTemplateMock: func(a ...interface{}) string {
			return "fake template"
		},
		ProcessTemplateMock: func(template string, dataSource interface{}) (string, error) {
			return "Is is a fake report", nil
		},
	}
	decorator := NewPullRequestDecorator(mockConn, mockEngine, "project-test", "repo-test")
	gotError := decorator.CommentQualityGate(sonar.QualityGate{}, "")
	if gotError != wantError {
		t.Fail()
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 327
} | [
2830,
3393,
36068,
1900,
47951,
16946,
478,
33074,
42318,
28188,
1454,
1925,
11505,
1900,
1155,
353,
8840,
836,
8,
341,
50780,
1454,
1669,
5975,
7121,
445,
28939,
5130,
77333,
9701,
1669,
609,
16712,
82,
24664,
4526,
515,
197,
197,
4133,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestMWStatic(t *testing.T) {
rankStr := strconv.Itoa(rand.Intn(100));
s := omg.New();
i := 1;
s.Handle("/", func(ctx *omg.Context) (string, error) {
sessionPlugin, _ := ctx.Plugin("session");
sessionInstance := sessionPlugin.(*Session);
sessionValue, _ := sessionInstance.Get();
if i == 1 {
sessionInstance.Set(rankStr);
}
i++;
if sessionValue == nil {
return "", nil;
}
return sessionValue.(string), nil;
}, omg.MethodGet);
s.Use(MW);
go s.Start("12355");
clt := http.Client{};
url := "http://127.0.0.1:12355/";
resp, _ := clt.Get(url);
content, _ := ioutil.ReadAll(resp.Body);
defer func() {
clt.CloseIdleConnections();
s.Close();
}();
if string(content) != "" {
t.Errorf("mw/session first request value is not empty")
}
reqest, _ := http.NewRequest("GET", url, nil)
originCookie := resp.Header["Set-Cookie"];
reqest.Header.Add("Cookie", originCookie[0]);
resp2, _ := clt.Do(reqest);
content2, _ := ioutil.ReadAll(resp2.Body);
if string(content2) != rankStr {
t.Errorf("mw/session second request value is not match")
}
} | explode_data.jsonl/5050 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 440
} | [
2830,
3393,
54173,
11690,
1155,
353,
8840,
836,
8,
341,
7000,
1180,
2580,
1669,
33317,
64109,
37595,
7371,
77,
7,
16,
15,
15,
1106,
1903,
1669,
7861,
70,
7121,
543,
8230,
1669,
220,
16,
280,
1903,
31421,
35460,
2915,
7502,
353,
316,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
// TestMatchBody table-tests the body matcher: plain substrings and regular
// expressions are matched against the raw body, while values that parse as
// JSON appear to be compared structurally (key order does not matter).
func TestMatchBody(t *testing.T) {
	cases := []struct {
		value string
		body string
		matches bool
	}{
		{"foo bar", "foo bar\n", true},
		{"foo", "foo bar\n", true},
		{"f[o]+", "foo\n", true},
		{`"foo"`, `{"foo":"bar"}\n`, true},
		{`{"foo":"bar"}`, `{"foo":"bar"}\n`, true},
		{`{"foo":"foo"}`, `{"foo":"bar"}\n`, false},
		{`{"foo":"bar","bar":"foo"}`, `{"bar":"foo","foo":"bar"}`, true},
		{`{"bar":"foo","foo":{"two":"three","three":"two"}}`, `{"foo":{"three":"two","two":"three"},"bar":"foo"}`, true},
	}
	for _, test := range cases {
		req := &http.Request{Body: createReadCloser([]byte(test.body))}
		ereq := &Request{BodyBuffer: []byte(test.value)}
		matches, err := MatchBody(req, ereq)
		st.Expect(t, err, nil)
		st.Expect(t, matches, test.matches)
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 330
} | [
2830,
3393,
8331,
5444,
1155,
353,
8840,
836,
8,
341,
1444,
2264,
1669,
3056,
1235,
341,
197,
16309,
256,
914,
198,
197,
35402,
262,
914,
198,
197,
2109,
9118,
1807,
198,
197,
59403,
197,
197,
4913,
7975,
3619,
497,
330,
7975,
3619,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
// TestPutChaincode exercises getDepSpec with degenerate chaincode metadata
// (empty name, empty version, and a freshly reset cache). Building the
// deployment spec is expected to succeed in all three cases.
func TestPutChaincode(t *testing.T) {
	ccname := ""
	ccver := "1.0"
	ccpath := "github.com/hyperledger/fabric/examples/chaincode/go/example02/cmd"
	ccinfoFs := &mockCCInfoFSStorageMgrImpl{CCMap: map[string]CCPackage{}}
	NewCCInfoCache(ccinfoFs)
	// Error case 1: ccname is empty.
	// Build the deployment (DEP) spec to be put.
	_, err := getDepSpec(ccname, ccpath, ccver, [][]byte{[]byte("init"), []byte("a"), []byte("100"), []byte("b"), []byte("200")})
	assert.NoError(t, err)
	// Error case 2: ccver is empty.
	ccname = "foo"
	ccver = ""
	_, err = getDepSpec(ccname, ccpath, ccver, [][]byte{[]byte("init"), []byte("a"), []byte("100"), []byte("b"), []byte("200")})
	assert.NoError(t, err)
	// Error case 3: ccfs.putchaincode returns an error.
	ccinfoFs = &mockCCInfoFSStorageMgrImpl{CCMap: map[string]CCPackage{}}
	NewCCInfoCache(ccinfoFs)
	ccname = "foo"
	ccver = "1.0"
	_, err = getDepSpec(ccname, ccpath, ccver, [][]byte{[]byte("init"), []byte("a"), []byte("100"), []byte("b"), []byte("200")})
	assert.NoError(t, err)
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 454
} | [
2830,
3393,
19103,
18837,
1851,
1155,
353,
8840,
836,
8,
341,
63517,
606,
1669,
8389,
63517,
423,
1669,
330,
16,
13,
15,
698,
63517,
2343,
1669,
330,
5204,
905,
7530,
39252,
50704,
6663,
28897,
67020,
14,
8819,
1851,
25525,
65182,
15,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestBootstrapClusterRoles snapshots the default bootstrap cluster roles
// against the bootstrap_cluster_roles.yaml golden file.
func TestBootstrapClusterRoles(t *testing.T) {
	roles := bootstrappolicy.GetBootstrapClusterRoles()
	list := &api.List{}
	for i := range roles {
		// Take the address of the slice element (not a loop variable) so each
		// appended item points at a distinct role.
		list.Items = append(list.Items, &roles[i])
	}
	testObjects(t, list, "bootstrap_cluster_roles.yaml")
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 99
} | [
2830,
3393,
45511,
28678,
25116,
1155,
353,
8840,
836,
8,
341,
197,
14643,
1669,
10459,
495,
676,
8018,
2234,
45511,
28678,
25116,
741,
14440,
1669,
609,
2068,
5814,
16094,
2023,
600,
1669,
2088,
12783,
341,
197,
14440,
12054,
284,
8737,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 2 |
func TestParseMonthYearToDate(t *testing.T) {
type args struct {
s string
}
t1 := args{"January 2006"}
t2 := args{"December 2010"}
t3 := args{"March 2019"}
tests := []struct {
name string
args args
want time.Time
}{
{"string March 2020 to datetime", t1, time.Date(2006, time.January, 01, 00, 00, 00, 00, time.UTC)},
{"string February2006 to datetime", t2, time.Date(2010, time.December, 01, 0, 0, 0, 0, time.UTC)},
{"string December2020 to datetime", t3, time.Date(2019, time.March, 01, 0, 0, 0, 0, time.UTC)},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
want := tt.want
actual := parseMonthYearToDate(tt.args.s)
assert.Equal(t, want, actual, "The string monthYear should be the same as the datetime.")
})
}
} | explode_data.jsonl/32663 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 304
} | [
2830,
3393,
14463,
11318,
9490,
76054,
1155,
353,
8840,
836,
8,
341,
13158,
2827,
2036,
341,
197,
1903,
914,
198,
197,
630,
3244,
16,
1669,
2827,
4913,
32227,
220,
17,
15,
15,
21,
16707,
3244,
17,
1669,
2827,
4913,
32146,
220,
17,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestShow iterates over every SHOW statement type and asserts that the
// schema built for it assigns a positive display length (Flen) to each
// output column.
func TestShow(t *testing.T) {
	node := &ast.ShowStmt{}
	tps := []ast.ShowStmtType{
		ast.ShowEngines,
		ast.ShowDatabases,
		ast.ShowTables,
		ast.ShowTableStatus,
		ast.ShowColumns,
		ast.ShowWarnings,
		ast.ShowCharset,
		ast.ShowVariables,
		ast.ShowStatus,
		ast.ShowCollation,
		ast.ShowCreateTable,
		ast.ShowCreateUser,
		ast.ShowGrants,
		ast.ShowTriggers,
		ast.ShowProcedureStatus,
		ast.ShowIndex,
		ast.ShowProcessList,
		ast.ShowCreateDatabase,
		ast.ShowEvents,
		ast.ShowMasterStatus,
		ast.ShowBackups,
		ast.ShowRestores,
	}
	for _, tp := range tps {
		node.Tp = tp
		schema, _ := buildShowSchema(node, false, false)
		for _, col := range schema.Columns {
			require.Greater(t, col.RetType.Flen, 0)
		}
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 324
} | [
2830,
3393,
7812,
1155,
353,
8840,
836,
8,
341,
20831,
1669,
609,
559,
9060,
31063,
16094,
73423,
82,
1669,
3056,
559,
9060,
31063,
929,
515,
197,
88836,
9060,
4106,
1543,
345,
197,
88836,
9060,
35,
23822,
345,
197,
88836,
9060,
21670,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestWriteErrorsWithLabels(t *testing.T) {
clientOpts := options.Client().SetRetryWrites(false).SetWriteConcern(mtest.MajorityWc).
SetReadConcern(mtest.MajorityRc)
mtOpts := mtest.NewOptions().ClientOptions(clientOpts).MinServerVersion("4.0").Topologies(mtest.ReplicaSet).
CreateClient(false)
mt := mtest.New(t, mtOpts)
defer mt.Close()
label := "ExampleError"
mt.Run("InsertMany errors with label", func(mt *mtest.T) {
mt.SetFailPoint(mtest.FailPoint{
ConfigureFailPoint: "failCommand",
Mode: mtest.FailPointMode{
Times: 1,
},
Data: mtest.FailPointData{
FailCommands: []string{"insert"},
WriteConcernError: &mtest.WriteConcernErrorData{
Code: 100,
ErrorLabels: &[]string{label},
},
},
})
_, err := mt.Coll.InsertMany(mtest.Background,
[]interface{}{
bson.D{
{"a", 1},
},
bson.D{
{"a", 2},
},
})
assert.NotNil(mt, err, "expected non-nil error, got nil")
we, ok := err.(mongo.BulkWriteException)
assert.True(mt, ok, "expected mongo.BulkWriteException, got %T", err)
assert.True(mt, we.HasErrorLabel(label), "expected error to have label: %v", label)
})
mt.Run("WriteException with label", func(mt *mtest.T) {
mt.SetFailPoint(mtest.FailPoint{
ConfigureFailPoint: "failCommand",
Mode: mtest.FailPointMode{
Times: 1,
},
Data: mtest.FailPointData{
FailCommands: []string{"delete"},
WriteConcernError: &mtest.WriteConcernErrorData{
Code: 100,
ErrorLabels: &[]string{label},
},
},
})
_, err := mt.Coll.DeleteMany(mtest.Background, bson.D{{"a", 1}})
assert.NotNil(mt, err, "expected non-nil error, got nil")
we, ok := err.(mongo.WriteException)
assert.True(mt, ok, "expected mongo.WriteException, got %T", err)
assert.True(mt, we.HasErrorLabel(label), "expected error to have label: %v", label)
})
mt.Run("BulkWriteException with label", func(mt *mtest.T) {
mt.SetFailPoint(mtest.FailPoint{
ConfigureFailPoint: "failCommand",
Mode: mtest.FailPointMode{
Times: 1,
},
Data: mtest.FailPointData{
FailCommands: []string{"delete"},
WriteConcernError: &mtest.WriteConcernErrorData{
Code: 100,
ErrorLabels: &[]string{label},
},
},
})
models := []mongo.WriteModel{
&mongo.InsertOneModel{bson.D{{"a", 2}}},
&mongo.DeleteOneModel{bson.D{{"a", 2}}, nil, nil},
}
_, err := mt.Coll.BulkWrite(mtest.Background, models)
assert.NotNil(mt, err, "expected non-nil error, got nil")
we, ok := err.(mongo.BulkWriteException)
assert.True(mt, ok, "expected mongo.BulkWriteException, got %T", err)
assert.True(mt, we.HasErrorLabel(label), "expected error to have label: %v", label)
})
} | explode_data.jsonl/8328 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1155
} | [
2830,
3393,
7985,
13877,
2354,
23674,
1155,
353,
8840,
836,
8,
341,
25291,
43451,
1669,
2606,
11716,
1005,
1649,
51560,
93638,
3576,
568,
1649,
7985,
62142,
1255,
1944,
1321,
3035,
487,
54,
66,
4292,
197,
22212,
4418,
62142,
1255,
1944,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestExportCreatesFileProperly(t *testing.T) {
tempDir, err := setupTempDir()
if err != nil {
t.Fatal(err)
}
defer os.RemoveAll(tempDir)
ex := Exporter{
Source: "testdata/examplebun",
Destination: filepath.Join(tempDir, "random-directory", "examplebun-whatev.tgz"),
Thin: true,
Logs: filepath.Join(tempDir, "export-logs"),
Unsigned: true,
Loader: loader.NewDetectingLoader(),
}
if err := ex.Export(); err == nil {
t.Error("Expected path does not exist error, got no error")
}
if err := os.MkdirAll(filepath.Join(tempDir, "random-directory"), 0755); err != nil {
t.Fatal(err)
}
if err := ex.Export(); err != nil {
t.Errorf("Expected no error, got error: %s", err)
}
expectedFile := filepath.Join(tempDir, "random-directory", "examplebun-whatev.tgz")
_, err = os.Stat(expectedFile)
if err != nil && os.IsNotExist(err) {
t.Errorf("Expected %s to exist but was not created", expectedFile)
} else if err != nil {
t.Errorf("Error with compressed bundle archive: %v", err)
}
} | explode_data.jsonl/27760 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 421
} | [
2830,
3393,
16894,
54868,
1703,
1336,
712,
398,
1155,
353,
8840,
836,
8,
341,
16280,
6184,
11,
1848,
1669,
6505,
12151,
6184,
741,
743,
1848,
961,
2092,
341,
197,
3244,
26133,
3964,
340,
197,
532,
16867,
2643,
84427,
9758,
6184,
692,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 8 |
func TestAddConjDistributiveInt64(t *testing.T) {
f := func(x, y *Int64) bool {
// t.Logf("x = %v, y = %v", x, y)
l, r := new(Int64), new(Int64)
l.Add(x, y)
l.Conj(l)
r.Add(r.Conj(x), new(Int64).Conj(y))
return l.Equals(r)
}
if err := quick.Check(f, nil); err != nil {
t.Error(err)
}
} | explode_data.jsonl/29673 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 163
} | [
2830,
3393,
2212,
1109,
73,
35,
4789,
6704,
1072,
21,
19,
1155,
353,
8840,
836,
8,
341,
1166,
1669,
2915,
2075,
11,
379,
353,
1072,
21,
19,
8,
1807,
341,
197,
197,
322,
259,
98954,
445,
87,
284,
1018,
85,
11,
379,
284,
1018,
85,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestLicenseService_ProtectCode(t *testing.T) {
services := InitMockEnvironment(t)
ls, err := NewLicenseService(services.conf)
assert.NoError(t, err)
services.license.EXPECT().ProtectCode().Return(nil)
err = ls.ProtectCode()
assert.NoError(t, err)
} | explode_data.jsonl/46595 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 89
} | [
2830,
3393,
9827,
1860,
1088,
4640,
439,
2078,
1155,
353,
8840,
836,
8,
341,
1903,
2161,
1669,
15690,
11571,
12723,
1155,
340,
197,
4730,
11,
1848,
1669,
1532,
9827,
1860,
1141,
2161,
13937,
340,
6948,
35699,
1155,
11,
1848,
340,
1903,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestMatchedStyle(t *testing.T) {
t.Parallel()
tests := []struct {
sel string
by QueryOption
}{
{`//*[@id="input1"]`, BySearch},
{`body > input[type="number"]:nth-child(1)`, ByQueryAll},
{`body > input[type="number"]:nth-child(1)`, ByQuery},
{`#input1`, ByID},
{`document.querySelector('#input1')`, ByJSPath},
}
for i, test := range tests {
test := test
t.Run(fmt.Sprintf("%02d", i), func(t *testing.T) {
t.Parallel()
ctx, cancel := testAllocate(t, "js.html")
defer cancel()
var styles *css.GetMatchedStylesForNodeReturns
if err := Run(ctx, MatchedStyle(test.sel, &styles, test.by)); err != nil {
t.Fatalf("got error: %v", err)
}
// TODO: Add logic to check if the style returned is true and valid.
})
}
} | explode_data.jsonl/59488 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 327
} | [
2830,
3393,
8331,
291,
2323,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
78216,
1669,
3056,
1235,
341,
197,
1903,
301,
914,
198,
197,
197,
1694,
220,
11361,
5341,
198,
197,
59403,
197,
197,
90,
63,
37318,
307,
428,
1355,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestValidateServerFiles_InvalidInput(t *testing.T) {
var tests = []struct {
name string
filesInput []models.File
}{
{"Invalid File info, missing both",
[]models.File{
models.File{
"",
"",
},
},
},
{"Invalid File info, missing source",
[]models.File{
models.File{
"",
"destinationString",
},
},
},
{"Invalid File info, missing destination",
[]models.File{
models.File{
"sourceString",
"",
},
},
},
}
for _, test := range tests {
output := validateServerFiles(test.filesInput)
if output == true {
t.Errorf("The validateServerFiles succeeded but should have failed validation for: %v", test)
}
}
} | explode_data.jsonl/71213 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 303
} | [
2830,
3393,
17926,
5475,
10809,
62,
7928,
2505,
1155,
353,
8840,
836,
8,
341,
2405,
7032,
284,
3056,
1235,
341,
197,
11609,
981,
914,
198,
197,
74075,
2505,
3056,
6507,
8576,
198,
197,
59403,
197,
197,
4913,
7928,
2887,
3546,
11,
7402... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestCastType(t *testing.T) {
log.SetLevel(log.DebugLevel)
typedef := "object"
res := castType(typedef)
if res != "dbus.ObjectPath" {
t.Fatal(fmt.Sprintf("%s != %s", typedef, res))
}
typedef = "array{objects, properties}"
res = castType(typedef)
if res != "[]dbus.ObjectPath, string" {
t.Fatal(fmt.Sprintf("%s != %s", typedef, res))
}
} | explode_data.jsonl/68476 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 156
} | [
2830,
3393,
18714,
929,
1155,
353,
8840,
836,
8,
1476,
6725,
4202,
4449,
12531,
20345,
4449,
692,
31199,
1669,
330,
1700,
698,
10202,
1669,
6311,
929,
66783,
4219,
692,
743,
592,
961,
330,
78986,
8348,
1820,
1,
341,
197,
3244,
26133,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestDocstore_PutEntity(t *testing.T) {
cases := []struct {
name string
dbFile string
conn string
cid string
put *store.AnchorEntity
}{
{"normal", testdb2, conn2, cid1, ae1},
}
for _, c := range cases {
c := c
t.Run(c.name, func(t *testing.T) {
docs := store.NewDocstore(c.conn)
ctx := context.Background()
ctx, cancelFunc := context.WithTimeout(ctx, 30*time.Second)
defer cancelFunc()
// put
if err := docs.PutEntity(ctx, c.put); err != nil {
t.Error(err)
}
// save
if err := docs.Close(); err != nil {
t.Error(err)
os.Remove(c.dbFile)
t.Skip()
}
// load & get
docs2 := store.NewDocstore(c.conn)
got := &store.AnchorEntity{CID: c.cid}
if err := docs2.GetEntity(ctx, got); err != nil {
t.Error(err)
}
if !reflect.DeepEqual(got, c.put) {
t.Errorf("got %+v but want %+v", got, c.put)
}
// cleanup
docs2.Close()
os.Remove(c.dbFile)
})
}
} | explode_data.jsonl/61309 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 462
} | [
2830,
3393,
9550,
4314,
1088,
332,
3030,
1155,
353,
8840,
836,
8,
341,
1444,
2264,
1669,
3056,
1235,
341,
197,
11609,
256,
914,
198,
197,
20939,
1703,
914,
198,
197,
32917,
256,
914,
198,
197,
1444,
307,
262,
914,
198,
197,
45062,
2... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestAsContentType(t *testing.T) {
r, err := Prepare(mocks.NewRequest(), AsContentType("application/text"))
if err != nil {
fmt.Printf("ERROR: %v", err)
}
if r.Header.Get(headerContentType) != "application/text" {
t.Fatalf("autorest: AsContentType failed to add header (%s=%s)", headerContentType, r.Header.Get(headerContentType))
}
} | explode_data.jsonl/20946 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 132
} | [
2830,
3393,
2121,
29504,
1155,
353,
8840,
836,
8,
972,
7000,
11,
1848,
1669,
31166,
1255,
25183,
75274,
1507,
1634,
29504,
445,
5132,
36271,
26043,
743,
1848,
961,
2092,
972,
197,
11009,
19367,
445,
3682,
25,
1018,
85,
497,
1848,
1218,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func Test_Hoverfly_SetMiddleware_WillErrorIfGivenBadRemote(t *testing.T) {
RegisterTestingT(t)
unit := NewHoverflyWithConfiguration(&Configuration{})
err := unit.SetMiddleware("", "", "[]somemadeupwebsite*&*^&$%^")
Expect(err).ToNot(BeNil())
Expect(unit.Cfg.Middleware.Binary).To(Equal(""))
script, _ := unit.Cfg.Middleware.GetScript()
Expect(script).To(Equal(""))
Expect(unit.Cfg.Middleware.Remote).To(Equal(""))
} | explode_data.jsonl/45387 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 166
} | [
2830,
3393,
2039,
1975,
21642,
14812,
24684,
2763,
483,
1454,
2679,
22043,
17082,
24703,
1155,
353,
8840,
836,
8,
341,
79096,
16451,
51,
1155,
692,
81189,
1669,
1532,
34379,
21642,
2354,
7688,
2099,
7688,
6257,
692,
9859,
1669,
4982,
4202... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func Test_buildRoutesAllowlist(t *testing.T) {
type expectedAllowedRoute struct {
method string
regexString string
}
testCases := []struct {
name string
skipAuthRegex []string
skipAuthRoutes []string
expectedRoutes []expectedAllowedRoute
shouldError bool
}{
{
name: "No skip auth configured",
skipAuthRegex: []string{},
skipAuthRoutes: []string{},
expectedRoutes: []expectedAllowedRoute{},
shouldError: false,
},
{
name: "Only skipAuthRegex configured",
skipAuthRegex: []string{
"^/foo/bar",
"^/baz/[0-9]+/thing",
},
skipAuthRoutes: []string{},
expectedRoutes: []expectedAllowedRoute{
{
method: "",
regexString: "^/foo/bar",
},
{
method: "",
regexString: "^/baz/[0-9]+/thing",
},
},
shouldError: false,
},
{
name: "Only skipAuthRoutes configured",
skipAuthRegex: []string{},
skipAuthRoutes: []string{
"GET=^/foo/bar",
"POST=^/baz/[0-9]+/thing",
"^/all/methods$",
"WEIRD=^/methods/are/allowed",
"PATCH=/second/equals?are=handled&just=fine",
},
expectedRoutes: []expectedAllowedRoute{
{
method: "GET",
regexString: "^/foo/bar",
},
{
method: "POST",
regexString: "^/baz/[0-9]+/thing",
},
{
method: "",
regexString: "^/all/methods$",
},
{
method: "WEIRD",
regexString: "^/methods/are/allowed",
},
{
method: "PATCH",
regexString: "/second/equals?are=handled&just=fine",
},
},
shouldError: false,
},
{
name: "Both skipAuthRegexes and skipAuthRoutes configured",
skipAuthRegex: []string{
"^/foo/bar/regex",
"^/baz/[0-9]+/thing/regex",
},
skipAuthRoutes: []string{
"GET=^/foo/bar",
"POST=^/baz/[0-9]+/thing",
"^/all/methods$",
},
expectedRoutes: []expectedAllowedRoute{
{
method: "",
regexString: "^/foo/bar/regex",
},
{
method: "",
regexString: "^/baz/[0-9]+/thing/regex",
},
{
method: "GET",
regexString: "^/foo/bar",
},
{
method: "POST",
regexString: "^/baz/[0-9]+/thing",
},
{
method: "",
regexString: "^/all/methods$",
},
},
shouldError: false,
},
{
name: "Invalid skipAuthRegex entry",
skipAuthRegex: []string{
"^/foo/bar",
"^/baz/[0-9]+/thing",
"(bad[regex",
},
skipAuthRoutes: []string{},
expectedRoutes: []expectedAllowedRoute{},
shouldError: true,
},
{
name: "Invalid skipAuthRoutes entry",
skipAuthRegex: []string{},
skipAuthRoutes: []string{
"GET=^/foo/bar",
"POST=^/baz/[0-9]+/thing",
"^/all/methods$",
"PUT=(bad[regex",
},
expectedRoutes: []expectedAllowedRoute{},
shouldError: true,
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
opts := &options.Options{
SkipAuthRegex: tc.skipAuthRegex,
SkipAuthRoutes: tc.skipAuthRoutes,
}
routes, err := buildRoutesAllowlist(opts)
if tc.shouldError {
assert.Error(t, err)
return
}
assert.NoError(t, err)
for i, route := range routes {
assert.Greater(t, len(tc.expectedRoutes), i)
assert.Equal(t, route.method, tc.expectedRoutes[i].method)
assert.Equal(t, route.pathRegex.String(), tc.expectedRoutes[i].regexString)
}
})
}
} | explode_data.jsonl/36424 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1798
} | [
2830,
3393,
20801,
26653,
18605,
1607,
1155,
353,
8840,
836,
8,
341,
13158,
3601,
35382,
4899,
2036,
341,
197,
42257,
414,
914,
198,
197,
197,
26387,
703,
914,
198,
197,
630,
18185,
37302,
1669,
3056,
1235,
341,
197,
11609,
1843,
914,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestGetContainerID(t *testing.T) {
fakeDocker := &FakeDockerClient{}
fakeDocker.ContainerList = []docker.APIContainers{
{
ID: "foobar",
Names: []string{"/k8s_foo_qux_ns_1234_42"},
},
{
ID: "barbar",
Names: []string{"/k8s_bar_qux_ns_2565_42"},
},
}
fakeDocker.Container = &docker.Container{
ID: "foobar",
}
dockerContainers, err := GetKubeletDockerContainers(fakeDocker, false)
if err != nil {
t.Errorf("Expected no error, Got %#v", err)
}
if len(dockerContainers) != 2 {
t.Errorf("Expected %#v, Got %#v", fakeDocker.ContainerList, dockerContainers)
}
verifyCalls(t, fakeDocker, []string{"list"})
dockerContainer, found, _ := dockerContainers.FindPodContainer("qux_ns", "", "foo")
if dockerContainer == nil || !found {
t.Errorf("Failed to find container %#v", dockerContainer)
}
fakeDocker.ClearCalls()
dockerContainer, found, _ = dockerContainers.FindPodContainer("foobar", "", "foo")
verifyCalls(t, fakeDocker, []string{})
if dockerContainer != nil || found {
t.Errorf("Should not have found container %#v", dockerContainer)
}
} | explode_data.jsonl/11442 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 430
} | [
2830,
3393,
1949,
4502,
915,
1155,
353,
8840,
836,
8,
341,
1166,
726,
35,
13659,
1669,
609,
52317,
35,
13659,
2959,
16094,
1166,
726,
35,
13659,
33672,
852,
284,
3056,
28648,
24922,
74632,
515,
197,
197,
515,
298,
29580,
25,
262,
330,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestAttrsUpdateLDErrorContext(t *testing.T) {
c := setupTest([]string{"update", "attrs", "--host", "orion-ld", "--id", "urn:ngsi-ld:Product:010", "--data", "{\"specialOffer\":{\"value\": true}}", "--context", "[\"http://context\""})
err := attrsUpdate(c, c.Ngsi, c.Client)
if assert.Error(t, err) {
ngsiErr := err.(*ngsierr.NgsiError)
assert.Equal(t, 2, ngsiErr.ErrNo)
assert.Equal(t, "unexpected EOF", ngsiErr.Message)
}
} | explode_data.jsonl/33075 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 191
} | [
2830,
3393,
53671,
4289,
43,
1150,
1275,
1972,
1155,
353,
8840,
836,
8,
341,
1444,
1669,
6505,
2271,
10556,
917,
4913,
2386,
497,
330,
20468,
497,
14482,
3790,
497,
330,
269,
290,
12,
507,
497,
14482,
307,
497,
330,
399,
25,
968,
63... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func Test_StoreRepo_FindAll(t *testing.T) {
s := sample.NewStore()
page := 1
limit := 10
sort := "created_at"
testCases := []struct {
name string
page int
limit int
sort string
expectedErr bool
prepare func(mock sqlmock.Sqlmock)
}{
{
name: "failure_get_all_returns_error",
page: page,
limit: limit,
sort: sort,
expectedErr: true,
prepare: func(mock sqlmock.Sqlmock) {
offset := (page - 1) * limit
query := fmt.Sprintf(`SELECT * FROM stores ORDER BY %s OFFSET %d LIMIT %d`, sort, offset, limit)
mock.ExpectQuery(regexp.QuoteMeta(query)).WillReturnError(errors.New("unexpected error"))
},
},
{
name: "failure_get_count_returns_error",
page: page,
limit: limit,
sort: sort,
expectedErr: true,
prepare: func(mock sqlmock.Sqlmock) {
offset := (page - 1) * limit
query := fmt.Sprintf(`SELECT * FROM stores ORDER BY %s OFFSET %d LIMIT %d`, sort, offset, limit)
countQuery := `SELECT count(1) FROM stores`
row := sqlmock.
NewRows([]string{"id", "created_at", "updated_at", "name", "status", "description", "account_id", "category_id", "user_id", "image", "tags", "lat", "lng"}).
AddRow(s.ID, s.CreatedAt, s.UpdatedAt, s.Name, s.Status, s.Description, s.AccountID, s.CategoryID, s.UserID, s.Image, s.Tags, s.Position.Lat, s.Position.Lng)
mock.ExpectQuery(regexp.QuoteMeta(query)).WillReturnRows(row)
mock.ExpectQuery(regexp.QuoteMeta(countQuery)).WillReturnError(errors.New("unexpected error"))
},
},
{
name: "success",
page: page,
limit: limit,
sort: sort,
prepare: func(mock sqlmock.Sqlmock) {
offset := (page - 1) * limit
query := fmt.Sprintf(`SELECT * FROM stores ORDER BY %s OFFSET %d LIMIT %d`, sort, offset, limit)
countQuery := `SELECT count(1) FROM stores`
row := sqlmock.
NewRows([]string{"id", "created_at", "updated_at", "name", "status", "description", "account_id", "category_id", "user_id", "image", "tags", "lat", "lng"}).
AddRow(s.ID, s.CreatedAt, s.UpdatedAt, s.Name, s.Status, s.Description, s.AccountID, s.CategoryID, s.UserID, s.Image, s.Tags, s.Position.Lat, s.Position.Lng)
mock.ExpectQuery(regexp.QuoteMeta(query)).WillReturnRows(row)
countRow := sqlmock.NewRows([]string{"count"}).AddRow(1)
mock.ExpectQuery(regexp.QuoteMeta(countQuery)).WillReturnRows(countRow)
},
},
}
for i := range testCases {
tc := testCases[i]
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
db, mock, err := sqlmock.New()
assert.NoError(t, err)
repo := pg.NewStoreRepository(db)
tc.prepare(mock)
res, count, err := repo.FindAll(context.TODO(), tc.sort, tc.limit, tc.page)
if tc.expectedErr {
assert.Error(t, err)
assert.Equal(t, count, int64(0))
assert.Len(t, res, 0)
} else {
assert.NoError(t, err)
assert.Equal(t, count, int64(1))
assert.Len(t, res, 1)
}
})
}
} | explode_data.jsonl/20619 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1358
} | [
2830,
3393,
92684,
25243,
95245,
2403,
1155,
353,
8840,
836,
8,
341,
1903,
1669,
6077,
7121,
6093,
2822,
35272,
1669,
220,
16,
198,
8810,
2353,
1669,
220,
16,
15,
198,
39487,
1669,
330,
7120,
3752,
1837,
18185,
37302,
1669,
3056,
1235,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestAnonymousAccess(t *testing.T) {
assert := assert.New(t)
{
// anonymous to access public project
ctl := &projecttesting.Controller{}
mock.OnAnything(ctl, "Get").Return(public, nil)
resource := NewNamespace(public.ProjectID).Resource(rbac.ResourceRepository)
evaluator := NewEvaluator(ctl, NewBuilderForUser(nil, ctl))
assert.True(evaluator.HasPermission(context.TODO(), resource, rbac.ActionPull))
}
{
// anonymous to access private project
ctl := &projecttesting.Controller{}
mock.OnAnything(ctl, "Get").Return(private, nil)
resource := NewNamespace(private.ProjectID).Resource(rbac.ResourceRepository)
evaluator := NewEvaluator(ctl, NewBuilderForUser(nil, ctl))
assert.False(evaluator.HasPermission(context.TODO(), resource, rbac.ActionPull))
}
} | explode_data.jsonl/75360 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 274
} | [
2830,
3393,
32684,
6054,
1155,
353,
8840,
836,
8,
341,
6948,
1669,
2060,
7121,
1155,
692,
197,
515,
197,
197,
322,
22151,
311,
2615,
584,
2390,
198,
197,
197,
12373,
1669,
609,
4987,
8840,
29112,
16094,
197,
77333,
8071,
77303,
7,
123... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestNode_ParseNode(t *testing.T) {
_, err := ParseNode("xxx:123")
assert.Error(t, err)
_, err = ParseNode("1.1.1.1123")
assert.Error(t, err)
_, err = ParseNode("1.1.1.1:-1")
assert.Error(t, err)
_, err = ParseNode("1.1.1.1:65536")
assert.Error(t, err)
node, err := ParseNode("1.1.1.1:65535")
assert.NoError(t, err)
node1 := node.(*StatelessNode)
assert.Equal(t, node1.HostIP, "1.1.1.1")
assert.Equal(t, node1.GRPCPort, uint16(65535))
_, err = ParseNode(":123")
assert.Error(t, err)
} | explode_data.jsonl/48085 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 244
} | [
2830,
3393,
1955,
77337,
1955,
1155,
353,
8840,
836,
8,
341,
197,
6878,
1848,
1669,
14775,
1955,
445,
24048,
25,
16,
17,
18,
1138,
6948,
6141,
1155,
11,
1848,
692,
197,
6878,
1848,
284,
14775,
1955,
445,
16,
13,
16,
13,
16,
13,
16... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestTimeMicrosLogicalTypeEncode(t *testing.T) {
schema := `{"type": "long", "logicalType": "time-micros"}`
testBinaryDecodeFail(t, schema, []byte(""), "short buffer")
testBinaryEncodeFail(t, schema, "test", "cannot transform to binary time-micros, expected time.Duration")
testBinaryCodecPass(t, schema, 66904022566*time.Microsecond, []byte("\xcc\xf8\xd2\xbc\xf2\x03"))
} | explode_data.jsonl/12009 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 143
} | [
2830,
3393,
1462,
34609,
82,
64312,
929,
32535,
1155,
353,
8840,
836,
8,
341,
1903,
3416,
1669,
1565,
4913,
1313,
788,
330,
4825,
497,
330,
30256,
929,
788,
330,
1678,
1448,
2754,
82,
9207,
3989,
18185,
21338,
32564,
19524,
1155,
11,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestMemberRollWithFailedSubjectAccessReview(t *testing.T) {
validator, _, tracker := createMemberRollValidatorTestFixture(smcp)
tracker.AddReactor("create", "subjectaccessreviews", createSubjectAccessReviewReactor(false, false, nil))
roll := newMemberRoll("default", "istio-system", "app-namespace")
response := validator.Handle(ctx, createCreateRequest(roll))
assert.False(response.Allowed, "Expected validator to reject ServiceMeshMemberRoll due to failed SubjectAccessReview check", t)
} | explode_data.jsonl/70266 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 142
} | [
2830,
3393,
9366,
32355,
2354,
9408,
13019,
6054,
19432,
1155,
353,
8840,
836,
8,
341,
197,
16112,
11,
8358,
28331,
1669,
1855,
9366,
32355,
14256,
69356,
55280,
4672,
340,
25583,
9683,
1904,
693,
5621,
445,
3182,
497,
330,
11501,
5211,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestCancelMultipleSpotOrdersOverCurrencyLimits(t *testing.T) {
TestSetRealOrderDefaults(t)
request := okgroup.CancelMultipleSpotOrdersRequest{
InstrumentID: spotCurrency,
OrderIDs: []int64{1, 2, 3, 4, 5},
}
_, err := o.CancelMultipleSpotOrders(request)
if err.Error() != "maximum 4 order cancellations for each pair" {
t.Error("Expecting an error when more than 4 orders for a pair supplied", err)
}
} | explode_data.jsonl/30162 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 149
} | [
2830,
3393,
9269,
32089,
47049,
24898,
1918,
26321,
94588,
1155,
353,
8840,
836,
8,
341,
73866,
1649,
12768,
4431,
16273,
1155,
340,
23555,
1669,
5394,
4074,
36491,
32089,
47049,
24898,
1900,
515,
197,
197,
56324,
915,
25,
7702,
26321,
34... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestSSLRequireWithRootCert(t *testing.T) {
maybeSkipSSLTests(t)
// Environment sanity check: should fail without SSL
checkSSLSetup(t, "sslmode=disable user=pqgossltest")
bogusRootCertPath := filepath.Join(os.Getenv("PQSSLCERTTEST_PATH"), "bogus_root.crt")
bogusRootCert := "sslrootcert=" + bogusRootCertPath + " "
// Not OK according to the bogus CA
_, err := openSSLConn(t, bogusRootCert+"host=postgres sslmode=require user=pqgossltest")
if err == nil {
t.Fatal("expected error")
}
_, ok := err.(x509.UnknownAuthorityError)
if !ok {
t.Fatalf("expected x509.UnknownAuthorityError, got %s, %#+v", err, err)
}
nonExistentCertPath := filepath.Join(os.Getenv("PQSSLCERTTEST_PATH"), "non_existent.crt")
nonExistentCert := "sslrootcert=" + nonExistentCertPath + " "
// No match on Common Name, but that's OK because we're not validating anything.
_, err = openSSLConn(t, nonExistentCert+"host=127.0.0.1 sslmode=require user=pqgossltest")
if err != nil {
t.Fatal(err)
}
rootCertPath := filepath.Join(os.Getenv("PQSSLCERTTEST_PATH"), "root.crt")
rootCert := "sslrootcert=" + rootCertPath + " "
// No match on Common Name, but that's OK because we're not validating the CN.
_, err = openSSLConn(t, rootCert+"host=127.0.0.1 sslmode=require user=pqgossltest")
if err != nil {
t.Fatal(err)
}
// Everything OK
_, err = openSSLConn(t, rootCert+"host=postgres sslmode=require user=pqgossltest")
if err != nil {
t.Fatal(err)
}
} | explode_data.jsonl/1384 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 572
} | [
2830,
3393,
22594,
17959,
2354,
8439,
36934,
1155,
353,
8840,
836,
8,
341,
2109,
49791,
35134,
22594,
18200,
1155,
340,
197,
322,
11586,
46842,
1779,
25,
1265,
3690,
2041,
25316,
198,
25157,
22594,
21821,
1155,
11,
330,
24635,
8516,
28,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
func Test_RenameTable_Migration(t *testing.T) {
argsss := []string{"add", "rt", "old_test123", "new_test123"}
fileName, mm, _ := generateMigration(argsss)
expectedString := `{"id":"` + getID(fileName) + `","up":{"renameTable":[{"oldTableName":"old_test123","newTableName":"new_test123"}]},"down":{"renameTable":[{"oldTableName":"new_test123","newTableName":"old_test123"}]}}`
content1, _ := json.Marshal(mm)
checkError(t, expectedString, string(content1))
} | explode_data.jsonl/22635 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 164
} | [
2830,
3393,
2568,
1840,
2556,
1245,
5033,
1155,
353,
8840,
836,
8,
341,
47903,
84160,
1669,
3056,
917,
4913,
718,
497,
330,
3342,
497,
330,
813,
4452,
16,
17,
18,
497,
330,
931,
4452,
16,
17,
18,
16707,
17661,
675,
11,
9465,
11,
7... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestSendRequestResponseNotFound(t *testing.T) {
// Handler returns 404 page not found
server := httptest.NewServer(http.NotFoundHandler())
defer server.Close()
status_code, body, err := sendRequest("POST", server.URL, "token", nil)
assert.Equal(t, 404, status_code)
assert.Contains(t, string(body), "page not found")
assert.Nil(t, err)
} | explode_data.jsonl/32246 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 119
} | [
2830,
3393,
11505,
1900,
2582,
10372,
1155,
353,
8840,
836,
8,
341,
197,
322,
19954,
4675,
220,
19,
15,
19,
2150,
537,
1730,
198,
41057,
1669,
54320,
70334,
7121,
5475,
19886,
67255,
3050,
2398,
16867,
3538,
10421,
2822,
23847,
4136,
11... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestSuspendJobJob(t *testing.T) {
responsecommand := v1alpha1.Command{}
responsejob := v1alpha1batch.Job{}
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if strings.HasSuffix(r.URL.Path, "command") {
w.Header().Set("Content-Type", "application/json")
val, err := json.Marshal(responsecommand)
if err == nil {
w.Write(val)
}
} else {
w.Header().Set("Content-Type", "application/json")
val, err := json.Marshal(responsejob)
if err == nil {
w.Write(val)
}
}
})
server := httptest.NewServer(handler)
defer server.Close()
suspendJobFlags.Master = server.URL
suspendJobFlags.Namespace = "test"
suspendJobFlags.JobName = "testjob"
testCases := []struct {
Name string
ExpectValue error
}{
{
Name: "SuspendJob",
ExpectValue: nil,
},
}
for i, testcase := range testCases {
err := SuspendJob()
if err != nil {
t.Errorf("case %d (%s): expected: %v, got %v ", i, testcase.Name, testcase.ExpectValue, err)
}
}
} | explode_data.jsonl/1854 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 432
} | [
2830,
3393,
50,
12758,
12245,
12245,
1155,
353,
8840,
836,
8,
341,
21735,
5631,
1669,
348,
16,
7141,
16,
12714,
16094,
21735,
8799,
1669,
348,
16,
7141,
16,
14049,
45293,
31483,
53326,
1669,
1758,
89164,
18552,
3622,
1758,
37508,
11,
43... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestGoldenWeights(t *testing.T) {
gllGoldenTests, err := filepath.Glob("testdata/wiki_*.gob.gz")
if err != nil {
t.Fatal(err)
}
for _, goldenPath := range gllGoldenTests {
golden := &GoldenTest{}
if err := golden.DecodeFromFile(goldenPath); err != nil {
t.Fatal(err)
}
for _, goldenCase := range golden.Cases {
t.Run(fmt.Sprintf("%s (%s)", golden.Name, goldenCase.Lang), func(t *testing.T) {
for coll, expected := range goldenCase.Weights {
coll := testcollation(t, coll)
input, err := coll.(CollationUCA).Encoding().EncodeFromUTF8(goldenCase.Text)
if err != nil {
t.Fatal(err)
}
result := coll.WeightString(nil, input, 0)
if !bytes.Equal(expected, result) {
t.Errorf("mismatch for collation=%s\noriginal: %s\ninput: %#v\nexpected: %v\nactual: %v",
coll.Name(), string(goldenCase.Text), input, expected, result)
}
}
})
}
}
} | explode_data.jsonl/30979 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 412
} | [
2830,
3393,
59790,
55630,
1155,
353,
8840,
836,
8,
341,
3174,
654,
59790,
18200,
11,
1848,
1669,
26054,
1224,
1684,
445,
92425,
25502,
62,
19922,
70,
674,
20963,
1138,
743,
1848,
961,
2092,
341,
197,
3244,
26133,
3964,
340,
197,
630,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
// TestVoteReviser pins the DB key-prefix constants, seeds a mock state
// manager with vote buckets and candidates, reloads the staking view, and
// verifies that every stored bucket's weighted vote is credited to the
// correct candidate (self-stake buckets additionally set SelfStake).
func TestVoteReviser(t *testing.T) {
	r := require.New(t)
	// make sure the prefix stays constant, they affect the key to store objects to DB
	r.Equal(byte(0), _const)
	r.Equal(byte(1), _bucket)
	r.Equal(byte(2), _voterIndex)
	r.Equal(byte(3), _candIndex)
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()
	sm := testdb.NewMockStateManager(ctrl)
	// seed the total bucket counter at zero so new bucket indices start from 0
	_, err := sm.PutState(
		&totalBucketCount{count: 0},
		protocol.NamespaceOption(StakingNameSpace),
		protocol.KeyOption(TotalBucketKey),
	)
	r.NoError(err)
	// each entry becomes one vote bucket: candidate, owner, staked amount,
	// staking duration, and the index the bucket is expected to be assigned
	tests := []struct {
		cand address.Address
		owner address.Address
		amount *big.Int
		duration uint32
		index uint64
	}{
		{
			identityset.Address(1),
			identityset.Address(2),
			big.NewInt(2100000000),
			21,
			0,
		},
		{
			identityset.Address(2),
			identityset.Address(3),
			big.NewInt(1400000000),
			14,
			1,
		},
		{
			identityset.Address(3),
			identityset.Address(4),
			big.NewInt(2500000000),
			25,
			2,
		},
		{
			identityset.Address(4),
			identityset.Address(1),
			big.NewInt(3100000000),
			31,
			3,
		},
	}
	// test loading with no candidate in stateDB
	stk, err := NewProtocol(nil, genesis.Default.Staking, nil, genesis.Default.GreenlandBlockHeight)
	r.NotNil(stk)
	r.NoError(err)
	// write a number of buckets into stateDB
	for _, e := range tests {
		vb := NewVoteBucket(e.cand, e.owner, e.amount, e.duration, time.Now(), true)
		index, err := putBucketAndIndex(sm, vb)
		r.NoError(err)
		r.Equal(index, vb.Index)
	}
	// load candidates from stateDB and verify
	ctx := protocol.WithBlockchainCtx(context.Background(), protocol.BlockchainCtx{
		Genesis: genesis.Default,
	})
	v, err := stk.Start(ctx, sm)
	sm.WriteView(protocolID, v)
	r.NoError(err)
	_, ok := v.(*ViewData)
	r.True(ok)
	csm, err := NewCandidateStateManager(sm, false)
	r.NoError(err)
	// load a number of candidates
	for _, e := range testCandidates {
		r.NoError(csm.Upsert(e.d))
	}
	r.NoError(csm.Commit())
	cands, _, err := getAllCandidates(sm)
	r.NoError(err)
	// clone every candidate with zeroed Votes/SelfStake so both can be
	// re-derived from the stored buckets in the loop below
	candm := make(map[string]*Candidate)
	for _, cand := range cands {
		candm[cand.Owner.String()] = cand.Clone()
		candm[cand.Owner.String()].Votes = new(big.Int)
		candm[cand.Owner.String()].SelfStake = new(big.Int)
	}
	buckets, _, err := getAllBuckets(sm)
	r.NoError(err)
	cv := genesis.Default.Staking.VoteWeightCalConsts
	t.Logf("bucketsLen: %d, cv: %+v", len(buckets), cv)
	for _, bucket := range buckets {
		r.Equal(bucket.isUnstaked(), false)
		cand, ok := candm[bucket.Candidate.String()]
		r.Equal(ok, true)
		// a self-stake bucket contributes its weighted vote AND sets the
		// candidate's self-stake; any other bucket only adds weighted votes
		if cand.SelfStakeBucketIdx == bucket.Index {
			r.NoError(cand.AddVote(calculateVoteWeight(cv, bucket, true)))
			cand.SelfStake = bucket.StakedAmount
		} else {
			r.NoError(cand.AddVote(calculateVoteWeight(cv, bucket, false)))
		}
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1182
} | [
2830,
3393,
41412,
693,
2682,
261,
1155,
353,
8840,
836,
8,
341,
7000,
1669,
1373,
7121,
1155,
692,
197,
322,
1281,
2704,
279,
9252,
26558,
6783,
11,
807,
7802,
279,
1376,
311,
3553,
6171,
311,
5952,
198,
7000,
12808,
19738,
7,
15,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
// Test_ResetPassword drives the ResetPassword handler through a table of
// form inputs and mocked user-service results, asserting the HTTP status
// code written for each case.
func Test_ResetPassword(t *testing.T) {
	tests := []struct {
		name string
		// prep installs the mock expectations a case needs; nil means none
		prep func(*testSetup)
		password string
		passwordConfirm string
		// NOTE(review): jwt is populated by several cases but never read in
		// the loop body below — confirm whether it should be attached to the
		// request.
		jwt string
		userId string
		wantResCode int
	}{
		{
			name: "should return 400 when password not specified",
			passwordConfirm: "testtest",
			wantResCode: http.StatusBadRequest,
		},
		{
			name: "should return 400 when password does not match passwordConfirm",
			passwordConfirm: "testtest",
			password: "testtest2",
			wantResCode: http.StatusBadRequest,
		},
		{
			name: "should return 400 when user service returns ErrInvalidID",
			passwordConfirm: "testtest",
			password: "testtest",
			jwt: testAuthToken,
			userId: testUserId.Hex(),
			prep: func(setup *testSetup) {
				setup.mockUService.EXPECT().UpdateUserWithID(setup.testCtx, testUserId.Hex(), gomock.Any()).
					Return(services.ErrInvalidID).Times(1)
			},
			wantResCode: http.StatusBadRequest,
		},
		{
			name: "should return 404 when user service returns ErrNotFound",
			passwordConfirm: "testtest",
			password: "testtest",
			jwt: testAuthToken,
			userId: testUserId.Hex(),
			prep: func(setup *testSetup) {
				setup.mockUService.EXPECT().UpdateUserWithID(setup.testCtx, testUserId.Hex(), gomock.Any()).
					Return(services.ErrNotFound).Times(1)
			},
			wantResCode: http.StatusNotFound,
		},
		{
			name: "should return 500 when user service returns unknown error",
			passwordConfirm: "testtest",
			password: "testtest",
			jwt: testAuthToken,
			userId: testUserId.Hex(),
			prep: func(setup *testSetup) {
				setup.mockUService.EXPECT().UpdateUserWithID(setup.testCtx, testUserId.Hex(), gomock.Any()).
					Return(errors.New("service err")).Times(1)
			},
			wantResCode: http.StatusInternalServerError,
		},
		// NOTE(review): this case is an exact duplicate of the previous one
		// (same name, inputs, expectations) — likely a copy-paste; consider
		// removing it or changing what it covers.
		{
			name: "should return 500 when user service returns unknown error",
			passwordConfirm: "testtest",
			password: "testtest",
			jwt: testAuthToken,
			userId: testUserId.Hex(),
			prep: func(setup *testSetup) {
				setup.mockUService.EXPECT().UpdateUserWithID(setup.testCtx, testUserId.Hex(), gomock.Any()).
					Return(errors.New("service err")).Times(1)
			},
			wantResCode: http.StatusInternalServerError,
		},
		{
			name: "should return 200 when authorizer fails to invalidate token",
			passwordConfirm: "testtest",
			password: "testtest",
			jwt: testAuthToken,
			userId: testUserId.Hex(),
			prep: func(setup *testSetup) {
				setup.mockUService.EXPECT().UpdateUserWithID(setup.testCtx, testUserId.Hex(), gomock.Any()).
					Return(nil).Times(1)
				setup.mockAuthorizer.EXPECT().InvalidateServiceToken(setup.testCtx, gomock.Any()).
					Return(authCommon.ErrInvalidTokenType).Times(1)
			},
			wantResCode: http.StatusOK,
		},
		{
			name: "should return 200",
			passwordConfirm: "testtest",
			password: "testtest",
			jwt: testAuthToken,
			userId: testUserId.Hex(),
			prep: func(setup *testSetup) {
				setup.mockUService.EXPECT().UpdateUserWithID(setup.testCtx, testUserId.Hex(), gomock.Any()).
					Return(nil).Times(1)
				setup.mockAuthorizer.EXPECT().InvalidateServiceToken(setup.testCtx, gomock.Any()).
					Return(nil).Times(1)
			},
			wantResCode: http.StatusOK,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			setup := setupTest(t, map[string]string{
				environment.JWTSecret: "test",
			})
			defer setup.ctrl.Finish()
			mockRenderPageCall(setup)
			if tt.prep != nil {
				tt.prep(setup)
			}
			// issue a POST carrying the form fields the handler reads
			testutils.AddRequestWithFormParamsToCtx(setup.testCtx, http.MethodPost, map[string]string{
				"password": tt.password,
				"passwordConfirm": tt.passwordConfirm,
				"userId": tt.userId,
			})
			setup.router.ResetPassword(setup.testCtx)
			assert.Equal(t, tt.wantResCode, setup.w.Code)
		})
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1864
} | [
2830,
3393,
67771,
4876,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
11609,
310,
914,
198,
197,
40346,
79,
310,
2915,
4071,
1944,
21821,
340,
197,
58199,
286,
914,
198,
197,
58199,
16728,
914,
198,
197,
12428,
930... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestGet(t *testing.T) {
const case1_empty = "/"
const case2_set_header = "/set_header"
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
// check method is GET before going to check other features
if r.Method != GET {
t.Errorf("Expected method %q; got %q", GET, r.Method)
}
if r.Header == nil {
t.Errorf("Expected non-nil request Header")
}
switch r.URL.Path {
default:
t.Errorf("No testing for this case yet : %q", r.URL.Path)
case case1_empty:
t.Logf("case %v ", case1_empty)
case case2_set_header:
t.Logf("case %v ", case2_set_header)
if r.Header.Get("API-Key") != "fookey" {
t.Errorf("Expected 'API-Key' == %q; got %q", "fookey", r.Header.Get("API-Key"))
}
}
}))
defer ts.Close()
New().Get(ts.URL + case1_empty).
End()
New().Get(ts.URL+case2_set_header).
Set("API-Key", "fookey").
End()
} | explode_data.jsonl/25463 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 391
} | [
2830,
3393,
1949,
1155,
353,
8840,
836,
8,
341,
4777,
1142,
16,
15124,
284,
80455,
4777,
1142,
17,
2602,
8757,
284,
3521,
746,
8757,
698,
57441,
1669,
54320,
70334,
7121,
5475,
19886,
89164,
18552,
3622,
1758,
37508,
11,
435,
353,
1254,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
// TestPodNodeConstraintsResources admits Create and Update requests for every
// pod-carrying resource kind the plugin handles, both with and without a
// forbidden nodeSelector, and checks that PodNodeConstraints forbids exactly
// the selector case with the expected error.
func TestPodNodeConstraintsResources(t *testing.T) {
	ns := metav1.NamespaceDefault
	// admission config paired with the user identity and the authorization
	// answer the fake authorizer will return
	testconfigs := []struct {
		config *podnodeconstraints.PodNodeConstraintsConfig
		userinfo user.Info
		reviewResponse *authorizationapi.SubjectAccessReviewResponse
	}{
		{
			config: testConfig(),
			userinfo: serviceaccount.UserInfo("", "", ""),
			reviewResponse: reviewResponse(false, ""),
		},
	}
	// one entry per resource kind: a factory producing the object (with or
	// without a nodeSelector), plus kind/groupresource for the admission record
	testresources := []struct {
		resource func(bool) runtime.Object
		kind schema.GroupKind
		groupresource schema.GroupResource
		prefix string
	}{
		{
			resource: replicationController,
			kind: kapi.Kind("ReplicationController"),
			groupresource: kapi.Resource("replicationcontrollers"),
			prefix: "ReplicationController",
		},
		{
			resource: deployment,
			kind: extensions.Kind("Deployment"),
			groupresource: extensions.Resource("deployments"),
			prefix: "Deployment",
		},
		{
			resource: replicaSet,
			kind: extensions.Kind("ReplicaSet"),
			groupresource: extensions.Resource("replicasets"),
			prefix: "ReplicaSet",
		},
		{
			resource: job,
			kind: batch.Kind("Job"),
			groupresource: batch.Resource("jobs"),
			prefix: "Job",
		},
		{
			resource: deploymentConfig,
			kind: oapps.Kind("DeploymentConfig"),
			groupresource: oapps.Resource("deploymentconfigs"),
			prefix: "DeploymentConfig",
		},
		{
			resource: podTemplate,
			kind: kapi.Kind("PodTemplate"),
			groupresource: kapi.Resource("podtemplates"),
			prefix: "PodTemplate",
		},
		{
			resource: podSecurityPolicySubjectReview,
			kind: security.Kind("PodSecurityPolicySubjectReview"),
			groupresource: security.Resource("podsecuritypolicysubjectreviews"),
			prefix: "PodSecurityPolicy",
		},
		{
			resource: podSecurityPolicySelfSubjectReview,
			kind: security.Kind("PodSecurityPolicySelfSubjectReview"),
			groupresource: security.Resource("podsecuritypolicyselfsubjectreviews"),
			prefix: "PodSecurityPolicy",
		},
		{
			resource: podSecurityPolicyReview,
			kind: security.Kind("PodSecurityPolicyReview"),
			groupresource: security.Resource("podsecuritypolicyreviews"),
			prefix: "PodSecurityPolicy",
		},
	}
	// with/without the forbidden nodeSelector and the error text expected
	testparams := []struct {
		nodeselector bool
		expectedErrorMsg string
		prefix string
	}{
		{
			nodeselector: true,
			expectedErrorMsg: "node selection by label(s) [bogus] is prohibited by policy for your role",
			prefix: "with nodeSelector",
		},
		{
			nodeselector: false,
			expectedErrorMsg: "",
			prefix: "without nodeSelector",
		},
	}
	testops := []struct {
		operation admission.Operation
	}{
		{
			operation: admission.Create,
		},
		{
			operation: admission.Update,
		},
	}
	// full cross-product: config x resource x selector x operation
	for _, tc := range testconfigs {
		for _, tr := range testresources {
			for _, tp := range testparams {
				for _, top := range testops {
					var expectedError error
					errPrefix := fmt.Sprintf("%s; %s; %s", tr.prefix, tp.prefix, top.operation)
					prc := NewPodNodeConstraints(tc.config, nodeidentifier.NewDefaultNodeIdentifier())
					prc.(initializer.WantsAuthorizer).SetAuthorizer(fakeAuthorizer(t))
					err := prc.(admission.InitializationValidator).ValidateInitialization()
					if err != nil {
						checkAdmitError(t, err, expectedError, errPrefix)
						continue
					}
					attrs := admission.NewAttributesRecord(tr.resource(tp.nodeselector), nil, tr.kind.WithVersion("version"), ns, "test", tr.groupresource.WithVersion("version"), "", top.operation, tc.userinfo)
					// only the nodeSelector variant is expected to be forbidden
					if tp.expectedErrorMsg != "" {
						expectedError = admission.NewForbidden(attrs, fmt.Errorf(tp.expectedErrorMsg))
					}
					err = prc.(admission.MutationInterface).Admit(attrs)
					checkAdmitError(t, err, expectedError, errPrefix)
				}
			}
		}
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1603
} | [
2830,
3393,
23527,
1955,
12925,
11277,
1155,
353,
8840,
836,
8,
341,
84041,
1669,
77520,
16,
46011,
3675,
198,
18185,
53978,
1669,
3056,
1235,
341,
197,
25873,
260,
353,
39073,
3509,
65312,
88823,
1955,
12925,
2648,
198,
197,
19060,
2733,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestInactiveNodes verifies that nsqlookupd drops a registered producer
// from its registry once the producer has been silent for longer than
// InactiveProducerTimeout.
func TestInactiveNodes(t *testing.T) {
	opts := NewOptions()
	opts.Logger = newTestLogger(t)
	// expire producers that have been quiet for 200ms
	opts.InactiveProducerTimeout = 200 * time.Millisecond
	tcpAddr, httpAddr, nsqlookupd := mustStartLookupd(opts)
	defer nsqlookupd.Exit()
	lookupdHTTPAddrs := []string{fmt.Sprintf("%s", httpAddr)}
	topicName := "inactive_nodes"
	conn := mustConnectLookupd(t, tcpAddr)
	defer conn.Close()
	// register a fake producer, then one topic/channel on it
	identify(t, conn, "ip.address", 5000, 5555, "fake-version")
	nsq.Register(topicName, "channel1").WriteTo(conn)
	_, err := nsq.ReadResponse(conn)
	equal(t, err, nil)
	// immediately after registration the producer and its topic are listed
	producers, _ := lookuputil.GetLookupdProducers(lookupdHTTPAddrs)
	equal(t, len(producers), 1)
	equal(t, len(producers[0].Topics), 1)
	equal(t, producers[0].Topics[0].Topic, topicName)
	equal(t, producers[0].Topics[0].Tombstoned, false)
	// wait past the inactivity timeout; the producer must be gone
	time.Sleep(250 * time.Millisecond)
	producers, _ = lookuputil.GetLookupdProducers(lookupdHTTPAddrs)
	equal(t, len(producers), 0)
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 374
} | [
2830,
3393,
72214,
12288,
1155,
353,
8840,
836,
8,
341,
64734,
1669,
1532,
3798,
741,
64734,
12750,
284,
501,
2271,
7395,
1155,
340,
64734,
5337,
3028,
45008,
7636,
284,
220,
17,
15,
15,
353,
882,
71482,
198,
3244,
4672,
13986,
11,
17... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestGetVersionsGithub serves canned GitHub release-list responses from a
// local HTTP server and checks that GetLatestReleaseGithub returns the
// newest non-RC version, and that parse errors, RC-only lists, empty lists,
// HTTP 404s, and transport failures surface as errors.
func TestGetVersionsGithub(t *testing.T) {
	// Ensure a clean environment.
	tests := []struct {
		Name string
		Path string
		ResponseBody string
		ExpectedErr string
		ExpectedVer string
	}{
		{
			"RC releases are skipped",
			"/no_rc",
			`[
	{
		"url": "https://api.github.com/repos/dapr/dapr/releases/44766923",
		"html_url": "https://github.com/dapr/dapr/releases/tag/v1.2.3-rc.1",
		"id": 44766926,
		"tag_name": "v1.2.3-rc.1",
		"target_commitish": "master",
		"name": "Dapr Runtime v1.2.3-rc.1",
		"draft": false,
		"prerelease": false
	},
	{
		"url": "https://api.github.com/repos/dapr/dapr/releases/44766923",
		"html_url": "https://github.com/dapr/dapr/releases/tag/v1.2.2",
		"id": 44766923,
		"tag_name": "v1.2.2",
		"target_commitish": "master",
		"name": "Dapr Runtime v1.2.2",
		"draft": false,
		"prerelease": false
	}
]
`,
			"",
			"1.2.2",
		},
		{
			"Malformed JSON",
			"/malformed",
			"[",
			"unexpected end of JSON input",
			"",
		},
		{
			"Only RCs",
			"/only_rcs",
			`[
	{
		"url": "https://api.github.com/repos/dapr/dapr/releases/44766923",
		"html_url": "https://github.com/dapr/dapr/releases/tag/v1.2.3-rc.1",
		"id": 44766926,
		"tag_name": "v1.2.3-rc.1",
		"target_commitish": "master",
		"name": "Dapr Runtime v1.2.3-rc.1",
		"draft": false,
		"prerelease": false
	}
] `,
			"no releases",
			"",
		},
		{
			"Empty json",
			"/empty",
			"[]",
			"no releases",
			"",
		},
	}
	// NOTE(review): the server binds a fixed port (12345) and is started in a
	// goroutine without a readiness wait — assumes the port is free and the
	// server is up before the first request; confirm this holds in CI.
	m := http.NewServeMux()
	s := http.Server{Addr: ":12345", Handler: m}
	for _, tc := range tests {
		// capture per-iteration so each handler serves its own body
		body := tc.ResponseBody
		m.HandleFunc(tc.Path, func(w http.ResponseWriter, r *http.Request) {
			fmt.Fprint(w, body)
		})
	}
	go func() {
		s.ListenAndServe()
	}()
	for _, tc := range tests {
		t.Run(tc.Name, func(t *testing.T) {
			version, err := GetLatestReleaseGithub(fmt.Sprintf("http://localhost:12345%s", tc.Path))
			assert.Equal(t, tc.ExpectedVer, version)
			if tc.ExpectedErr != "" {
				assert.EqualError(t, err, tc.ExpectedErr)
			}
		})
	}
	t.Run("error on 404", func(t *testing.T) {
		version, err := GetLatestReleaseGithub("http://localhost:12345/non-existant/path")
		assert.Equal(t, "", version)
		assert.EqualError(t, err, "http://localhost:12345/non-existant/path - 404 Not Found")
	})
	t.Run("error on bad addr", func(t *testing.T) {
		version, err := GetLatestReleaseGithub("http://a.super.non.existant.domain/")
		assert.Equal(t, "", version)
		assert.Error(t, err)
	})
	s.Shutdown(context.Background())
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1213
} | [
2830,
3393,
1949,
69015,
78717,
1155,
353,
8840,
836,
8,
341,
197,
322,
29279,
264,
4240,
4573,
382,
78216,
1669,
3056,
1235,
341,
197,
21297,
260,
914,
198,
197,
69640,
260,
914,
198,
197,
197,
29637,
914,
198,
197,
197,
18896,
7747,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestMuxRedirectLeadingSlashes(t *testing.T) {
paths := []string{"//foo.txt", "///foo.txt", "/../../foo.txt"}
for _, path := range paths {
req, err := ReadRequest(bufio.NewReader(strings.NewReader("GET " + path + " HTTP/1.1\r\nHost: test\r\n\r\n")))
if err != nil {
t.Errorf("%s", err)
}
mux := NewServeMux()
resp := httptest.NewRecorder()
mux.ServeHTTP(resp, req)
if loc, expected := resp.Header().Get("Location"), "/foo.txt"; loc != expected {
t.Errorf("Expected Location header set to %q; got %q", expected, loc)
return
}
if code, expected := resp.Code, StatusMovedPermanently; code != expected {
t.Errorf("Expected response code of StatusMovedPermanently; got %d", code)
return
}
}
} | explode_data.jsonl/22395 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 292
} | [
2830,
3393,
44,
2200,
17725,
69750,
7442,
14051,
1155,
353,
8840,
836,
8,
341,
197,
21623,
1669,
3056,
917,
4913,
322,
7975,
3909,
497,
330,
2575,
7975,
3909,
497,
3521,
2748,
7975,
3909,
16707,
2023,
8358,
1815,
1669,
2088,
12716,
341,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
// TestVTGateExecuteWithKeyspace executes a query against the unsharded test
// keyspace and expects the sandbox's canned single-row result; it then
// executes with a keyspace name that is not in the vschema and expects a
// "keyspace not found" error.
func TestVTGateExecuteWithKeyspace(t *testing.T) {
	createSandbox(KsTestUnsharded)
	hcVTGateTest.Reset()
	// one healthy master tablet backs the unsharded keyspace
	hcVTGateTest.AddTestTablet("aa", "1.1.1.1", 1001, KsTestUnsharded, "0", topodatapb.TabletType_MASTER, true, 1, nil)
	qr, err := rpcVTGate.Execute(context.Background(),
		"select id from none",
		nil,
		KsTestUnsharded,
		topodatapb.TabletType_MASTER,
		nil,
		false,
		nil)
	if err != nil {
		t.Errorf("want nil, got %v", err)
	}
	if !reflect.DeepEqual(sandboxconn.SingleRowResult, qr) {
		t.Errorf("want \n%+v, got \n%+v", sandboxconn.SingleRowResult, qr)
	}
	// "aa" is not a keyspace present in the vschema, so Execute must fail
	_, err = rpcVTGate.Execute(context.Background(),
		"select id from none",
		nil,
		"aa",
		topodatapb.TabletType_MASTER,
		nil,
		false,
		nil)
	want := "keyspace aa not found in vschema, vtgate: "
	if err == nil || err.Error() != want {
		t.Errorf("Execute: %v, want %s", err, want)
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 386
} | [
2830,
3393,
20457,
42318,
17174,
2354,
8850,
1306,
1155,
353,
8840,
836,
8,
341,
39263,
50,
31536,
16738,
82,
2271,
1806,
927,
20958,
340,
9598,
66,
20457,
42318,
2271,
36660,
741,
9598,
66,
20457,
42318,
2271,
1904,
2271,
2556,
83,
445... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
// TestPriorityQueue_UpdateNominatedPodForNode checks the nominator's
// bookkeeping: adding a nominated pod with an explicit node overrides the
// node in the pod object, popping a pod does not remove its nomination,
// re-adding moves a pod to a new node, and DeleteNominatedPodIfExists
// removes a pod's nomination entirely.
func TestPriorityQueue_UpdateNominatedPodForNode(t *testing.T) {
	objs := []runtime.Object{medPriorityPodInfo.Pod, unschedulablePodInfo.Pod, highPriorityPodInfo.Pod}
	q := NewTestQueueWithObjects(context.Background(), newDefaultQueueSort(), objs)
	if err := q.Add(medPriorityPodInfo.Pod); err != nil {
		t.Errorf("add failed: %v", err)
	}
	// Update unschedulablePodInfo on a different node than specified in the pod.
	q.AddNominatedPod(framework.NewPodInfo(unschedulablePodInfo.Pod), "node5")
	// Update nominated node name of a pod on a node that is not specified in the pod object.
	q.AddNominatedPod(framework.NewPodInfo(highPriorityPodInfo.Pod), "node2")
	// expected state: explicit node arguments win over the pod object
	expectedNominatedPods := &nominator{
		nominatedPodToNode: map[types.UID]string{
			medPriorityPodInfo.Pod.UID: "node1",
			highPriorityPodInfo.Pod.UID: "node2",
			unschedulablePodInfo.Pod.UID: "node5",
		},
		nominatedPods: map[string][]*framework.PodInfo{
			"node1": {medPriorityPodInfo},
			"node2": {highPriorityPodInfo},
			"node5": {unschedulablePodInfo},
		},
	}
	if diff := cmp.Diff(q.PodNominator, expectedNominatedPods, cmp.AllowUnexported(nominator{}), cmpopts.IgnoreFields(nominator{}, "podLister", "RWMutex")); diff != "" {
		t.Errorf("Unexpected diff after adding pods (-want, +got):\n%s", diff)
	}
	if p, err := q.Pop(); err != nil || p.Pod != medPriorityPodInfo.Pod {
		t.Errorf("Expected: %v after Pop, but got: %v", medPriorityPodInfo.Pod.Name, p.Pod.Name)
	}
	// List of nominated pods shouldn't change after popping them from the queue.
	if diff := cmp.Diff(q.PodNominator, expectedNominatedPods, cmp.AllowUnexported(nominator{}), cmpopts.IgnoreFields(nominator{}, "podLister", "RWMutex")); diff != "" {
		t.Errorf("Unexpected diff after popping pods (-want, +got):\n%s", diff)
	}
	// Update one of the nominated pods that doesn't have nominatedNodeName in the
	// pod object. It should be updated correctly.
	q.AddNominatedPod(highPriorityPodInfo, "node4")
	expectedNominatedPods = &nominator{
		nominatedPodToNode: map[types.UID]string{
			medPriorityPodInfo.Pod.UID: "node1",
			highPriorityPodInfo.Pod.UID: "node4",
			unschedulablePodInfo.Pod.UID: "node5",
		},
		nominatedPods: map[string][]*framework.PodInfo{
			"node1": {medPriorityPodInfo},
			"node4": {highPriorityPodInfo},
			"node5": {unschedulablePodInfo},
		},
	}
	if diff := cmp.Diff(q.PodNominator, expectedNominatedPods, cmp.AllowUnexported(nominator{}), cmpopts.IgnoreFields(nominator{}, "podLister", "RWMutex")); diff != "" {
		t.Errorf("Unexpected diff after updating pods (-want, +got):\n%s", diff)
	}
	// Delete a nominated pod that doesn't have nominatedNodeName in the pod
	// object. It should be deleted.
	q.DeleteNominatedPodIfExists(highPriorityPodInfo.Pod)
	expectedNominatedPods = &nominator{
		nominatedPodToNode: map[types.UID]string{
			medPriorityPodInfo.Pod.UID: "node1",
			unschedulablePodInfo.Pod.UID: "node5",
		},
		nominatedPods: map[string][]*framework.PodInfo{
			"node1": {medPriorityPodInfo},
			"node5": {unschedulablePodInfo},
		},
	}
	if diff := cmp.Diff(q.PodNominator, expectedNominatedPods, cmp.AllowUnexported(nominator{}), cmpopts.IgnoreFields(nominator{}, "podLister", "RWMutex")); diff != "" {
		t.Errorf("Unexpected diff after deleting pods (-want, +got):\n%s", diff)
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1255
} | [
2830,
3393,
20555,
7554,
47393,
45,
49515,
23527,
2461,
1955,
1155,
353,
8840,
836,
8,
341,
22671,
82,
1669,
3056,
22255,
8348,
90,
2061,
20555,
23527,
1731,
88823,
11,
6975,
2397,
360,
480,
23527,
1731,
88823,
11,
1550,
20555,
23527,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 8 |
func TestDeploymentTransform(t *testing.T) {
for _, tt := range updateDeploymentImageTests {
t.Run(tt.name, func(t *testing.T) {
runDeploymentTransformTest(t, &tt)
})
}
} | explode_data.jsonl/17099 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 73
} | [
2830,
3393,
75286,
8963,
1155,
353,
8840,
836,
8,
341,
2023,
8358,
17853,
1669,
2088,
2647,
75286,
1906,
18200,
341,
197,
3244,
16708,
47152,
2644,
11,
2915,
1155,
353,
8840,
836,
8,
341,
298,
56742,
75286,
8963,
2271,
1155,
11,
609,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestDeviceCodeReturnsErrorIfEmptyDeviceCode(t *testing.T) {
sender := mocks.NewSender()
body := mocks.NewBody("")
sender.AppendResponse(mocks.NewResponseWithBodyAndStatus(body, http.StatusOK, "OK"))
_, err := InitiateDeviceAuth(sender, TestOAuthConfig, TestClientID, TestResource)
if err != ErrDeviceCodeEmpty {
t.Fatalf("adal: failed to get correct error expected(%s) actual(%s)", ErrDeviceCodeEmpty, err.Error())
}
if body.IsOpen() {
t.Fatalf("response body was left open!")
}
} | explode_data.jsonl/27500 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 172
} | [
2830,
3393,
6985,
2078,
16446,
1454,
2679,
3522,
6985,
2078,
1155,
353,
8840,
836,
8,
341,
1903,
1659,
1669,
68909,
7121,
20381,
741,
35402,
1669,
68909,
7121,
5444,
31764,
1903,
1659,
8982,
2582,
1255,
25183,
7121,
2582,
2354,
5444,
3036... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestRealClock(t *testing.T) {
c := Clock{}
n := c.Now()
now := time.Now()
diff := now.Sub(n)
tolerance := time.Millisecond
if diff > tolerance {
t.Fatalf("Got time %v, want %v to within %v tolerance. Diff was %v.",
n, now, tolerance, diff)
}
} | explode_data.jsonl/25153 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 106
} | [
2830,
3393,
12768,
26104,
1155,
353,
8840,
836,
8,
341,
1444,
1669,
26142,
16094,
9038,
1669,
272,
13244,
741,
80922,
1669,
882,
13244,
741,
80564,
1669,
1431,
12391,
1445,
340,
3244,
31661,
1669,
882,
71482,
198,
743,
3638,
861,
24098,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestFlagNameCompletionInGo(t *testing.T) {
rootCmd := &Command{
Use: "root",
Run: emptyRun,
}
childCmd := &Command{
Use: "childCmd",
Run: emptyRun,
}
rootCmd.AddCommand(childCmd)
rootCmd.Flags().IntP("first", "f", -1, "first flag")
rootCmd.PersistentFlags().BoolP("second", "s", false, "second flag")
childCmd.Flags().String("subFlag", "", "sub flag")
// Test that flag names are not shown if the user has not given the '-' prefix
output, err := executeCommand(rootCmd, ShellCompNoDescRequestCmd, "")
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
expected := strings.Join([]string{
"childCmd",
"completion",
"help",
":4",
"Completion ended with directive: ShellCompDirectiveNoFileComp", ""}, "\n")
if output != expected {
t.Errorf("expected: %q, got: %q", expected, output)
}
// Test that flag names are completed
output, err = executeCommand(rootCmd, ShellCompNoDescRequestCmd, "-")
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
expected = strings.Join([]string{
"--first",
"-f",
"--second",
"-s",
":4",
"Completion ended with directive: ShellCompDirectiveNoFileComp", ""}, "\n")
if output != expected {
t.Errorf("expected: %q, got: %q", expected, output)
}
// Test that flag names are completed when a prefix is given
output, err = executeCommand(rootCmd, ShellCompNoDescRequestCmd, "--f")
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
expected = strings.Join([]string{
"--first",
":4",
"Completion ended with directive: ShellCompDirectiveNoFileComp", ""}, "\n")
if output != expected {
t.Errorf("expected: %q, got: %q", expected, output)
}
// Test that flag names are completed in a sub-cmd
output, err = executeCommand(rootCmd, ShellCompNoDescRequestCmd, "childCmd", "-")
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
expected = strings.Join([]string{
"--second",
"-s",
"--subFlag",
":4",
"Completion ended with directive: ShellCompDirectiveNoFileComp", ""}, "\n")
if output != expected {
t.Errorf("expected: %q, got: %q", expected, output)
}
} | explode_data.jsonl/43740 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 765
} | [
2830,
3393,
12135,
675,
33190,
641,
10850,
1155,
353,
8840,
836,
8,
341,
33698,
15613,
1669,
609,
4062,
515,
197,
95023,
25,
330,
2888,
756,
197,
85952,
25,
4287,
6727,
345,
197,
532,
58391,
15613,
1669,
609,
4062,
515,
197,
95023,
25... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 9 |
func TestTaskRunWithSpecDescribe_custom_timeout(t *testing.T) {
trs := []*v1alpha1.TaskRun{
tb.TaskRun("tr-custom-timeout", tb.TaskRunNamespace("ns"),
tb.TaskRunSpec(tb.TaskRunTimeout(time.Minute)),
),
}
cs, _ := test.SeedTestData(t, pipelinetest.Data{
TaskRuns: trs,
Namespaces: []*corev1.Namespace{
{
ObjectMeta: metav1.ObjectMeta{
Name: "ns",
},
},
},
})
version := "v1alpha1"
tdc := testDynamic.Options{}
dynamic, err := tdc.Client(
cb.UnstructuredTR(trs[0], version),
)
if err != nil {
t.Errorf("unable to create dynamic client: %v", err)
}
cs.Pipeline.Resources = cb.APIResourceList(version, []string{"taskrun"})
if err != nil {
fmt.Println(err)
}
p := &test.Params{Tekton: cs.Pipeline, Kube: cs.Kube, Dynamic: dynamic}
taskrun := Command(p)
actual, err := test.ExecuteCommand(taskrun, "desc", "tr-custom-timeout", "-n", "ns")
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
golden.Assert(t, actual, fmt.Sprintf("%s.golden", t.Name()))
} | explode_data.jsonl/77401 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 447
} | [
2830,
3393,
6262,
51918,
8327,
74785,
15875,
20537,
1155,
353,
8840,
836,
8,
341,
25583,
82,
1669,
29838,
85,
16,
7141,
16,
28258,
6727,
515,
197,
62842,
28258,
6727,
445,
376,
36898,
7246,
411,
497,
16363,
28258,
6727,
22699,
445,
4412... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestArg(t *testing.T) {
b := newBuilderWithMockBackend()
sb := newDispatchRequest(b, '`', nil, newBuildArgs(make(map[string]*string)), newStagesBuildResults())
argName := "foo"
argVal := "bar"
cmd := &instructions.ArgCommand{Key: argName, Value: &argVal}
err := dispatch(sb, cmd)
require.NoError(t, err)
expected := map[string]string{argName: argVal}
assert.Equal(t, expected, sb.state.buildArgs.GetAllAllowed())
} | explode_data.jsonl/34877 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 159
} | [
2830,
3393,
2735,
1155,
353,
8840,
836,
8,
341,
2233,
1669,
501,
3297,
2354,
11571,
29699,
741,
24842,
1669,
501,
11283,
1900,
1883,
11,
55995,
516,
2092,
11,
501,
11066,
4117,
36944,
9147,
14032,
8465,
917,
5731,
501,
623,
1134,
11066,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestRateSetter_ErrorHandling verifies that a message whose payload fills
// MaxLocalQueueSize bytes is rejected by the rate setter: issuing it must
// fire the MessageDiscarded event carrying that message's ID.
func TestRateSetter_ErrorHandling(t *testing.T) {
	localID := identity.GenerateLocalIdentity()
	localNode := identity.New(localID.PublicKey())
	tangle := newTestTangle(Identity(localID), RateSetterConfig(testRateSetterParams))
	defer tangle.Shutdown()
	rateSetter := NewRateSetter(tangle)
	defer rateSetter.Shutdown()
	// capture the ID of any discarded message on a buffered channel
	messageDiscarded := make(chan MessageID, 1)
	discardedCounter := events.NewClosure(func(id MessageID) { messageDiscarded <- id })
	rateSetter.Events.MessageDiscarded.Attach(discardedCounter)
	// a generic data payload of MaxLocalQueueSize bytes — large enough that
	// the rate setter's local queue cannot hold it
	msg := NewMessage(
		[]MessageID{EmptyMessageID},
		[]MessageID{},
		time.Now(),
		localNode.PublicKey(),
		0,
		payload.NewGenericDataPayload(make([]byte, MaxLocalQueueSize)),
		0,
		ed25519.Signature{},
	)
	assert.NoError(t, rateSetter.Issue(msg))
	// the discard happens asynchronously; poll until the event arrives or
	// the one-second deadline expires
	assert.Eventually(t, func() bool {
		select {
		case id := <-messageDiscarded:
			return assert.Equal(t, msg.ID(), id)
		default:
			return false
		}
	}, 1*time.Second, 10*time.Millisecond)
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 349
} | [
2830,
3393,
11564,
44294,
28651,
38606,
1155,
353,
8840,
836,
8,
341,
8854,
915,
1669,
9569,
57582,
7319,
18558,
741,
8854,
1955,
1669,
9569,
7121,
18082,
915,
49139,
1592,
12367,
3244,
4044,
1669,
501,
2271,
51,
4044,
7,
18558,
18082,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestHandlerErrors(t *testing.T) {
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
tests := []struct {
desc string
format tchannel.Format
headers []byte
wantHeaders map[string]string
newResponseWriter func(inboundCallResponse, tchannel.Format, headerCase) responseWriter
recorder recorder
wantLogLevel zapcore.Level
wantLogMessage string
wantErrMessage string
}{
{
desc: "test tchannel json handler",
format: tchannel.JSON,
headers: []byte(`{"Rpc-Header-Foo": "bar"}`),
wantHeaders: map[string]string{"rpc-header-foo": "bar"},
newResponseWriter: newHandlerWriter,
recorder: newResponseRecorder(),
},
{
desc: "test tchannel thrift handler",
format: tchannel.Thrift,
headers: []byte{
0x00, 0x01, // 1 header
0x00, 0x03, 'F', 'o', 'o', // Foo
0x00, 0x03, 'B', 'a', 'r', // Bar
},
wantHeaders: map[string]string{"foo": "Bar"},
newResponseWriter: newHandlerWriter,
recorder: newResponseRecorder(),
},
{
desc: "test responseWriter.Close() failure logging",
format: tchannel.JSON,
headers: []byte(`{"Rpc-Header-Foo": "bar"}`),
wantHeaders: map[string]string{"rpc-header-foo": "bar"},
newResponseWriter: newFaultyHandlerWriter,
recorder: newResponseRecorder(),
wantLogLevel: zapcore.ErrorLevel,
wantLogMessage: "responseWriter failed to close",
wantErrMessage: "faultyHandlerWriter failed to close",
},
{
desc: "test SendSystemError() failure logging",
format: tchannel.JSON,
headers: []byte(`{"Rpc-Header-Foo": "bar"}`),
wantHeaders: map[string]string{"rpc-header-foo": "bar"},
newResponseWriter: newFaultyHandlerWriter,
recorder: newFaultyResponseRecorder(),
wantLogLevel: zapcore.ErrorLevel,
wantLogMessage: "SendSystemError failed",
wantErrMessage: "SendSystemError failure",
},
}
for _, tt := range tests {
core, logs := observer.New(zapcore.ErrorLevel)
rpcHandler := transporttest.NewMockUnaryHandler(mockCtrl)
router := transporttest.NewMockRouter(mockCtrl)
spec := transport.NewUnaryHandlerSpec(rpcHandler)
tchHandler := handler{router: router, logger: zap.New(core).Named("tchannel"), newResponseWriter: tt.newResponseWriter}
router.EXPECT().Choose(gomock.Any(), routertest.NewMatcher().
WithService("service").
WithProcedure("hello"),
).Return(spec, nil)
rpcHandler.EXPECT().Handle(
transporttest.NewContextMatcher(t),
transporttest.NewRequestMatcher(t,
&transport.Request{
Caller: "caller",
Service: "service",
Transport: "tchannel",
Headers: transport.HeadersFromMap(tt.wantHeaders),
Encoding: transport.Encoding(tt.format),
Procedure: "hello",
ShardKey: "shard",
RoutingKey: "routekey",
RoutingDelegate: "routedelegate",
Body: bytes.NewReader([]byte("world")),
}),
gomock.Any(),
).Return(nil)
respRecorder := tt.recorder
ctx, cancel := context.WithTimeout(context.Background(), testtime.Second)
defer cancel()
tchHandler.handle(ctx, &fakeInboundCall{
service: "service",
caller: "caller",
format: tt.format,
method: "hello",
shardkey: "shard",
routingkey: "routekey",
routingdelegate: "routedelegate",
arg2: tt.headers,
arg3: []byte("world"),
resp: respRecorder,
})
getLog := func() observer.LoggedEntry {
entries := logs.TakeAll()
return entries[0]
}
if tt.wantLogMessage != "" {
log := getLog()
logContext := log.ContextMap()
assert.Equal(t, tt.wantLogLevel, log.Entry.Level, "Unexpected log level")
assert.Equal(t, tt.wantLogMessage, log.Entry.Message, "Unexpected log message written")
assert.Equal(t, tt.wantErrMessage, logContext["error"], "Unexpected error message")
assert.Equal(t, "tchannel", log.LoggerName, "Unexpected logger name")
assert.Error(t, respRecorder.SystemError(), "Error expected with logging")
}
}
} | explode_data.jsonl/53837 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1907
} | [
2830,
3393,
3050,
13877,
1155,
353,
8840,
836,
8,
341,
77333,
15001,
1669,
342,
316,
1176,
7121,
2051,
1155,
340,
16867,
7860,
15001,
991,
18176,
2822,
78216,
1669,
3056,
1235,
341,
197,
41653,
1060,
914,
198,
197,
59416,
310,
259,
1011... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestApp_CreateModifyDeleteCDP(t *testing.T) {
// Setup
mapp, keeper := setUpMockAppWithoutGenesis()
genAccs, addrs, _, privKeys := mock.CreateGenAccounts(1, cs(c("xrp", 100)))
testAddr := addrs[0]
testPrivKey := privKeys[0]
mock.SetGenesis(mapp, genAccs)
// setup pricefeed, TODO can this be shortened a bit?
header := abci.Header{Height: mapp.LastBlockHeight() + 1}
mapp.BeginBlock(abci.RequestBeginBlock{Header: header})
ctx := mapp.BaseApp.NewContext(false, header)
keeper.pricefeed.AddAsset(ctx, "xrp", "xrp test")
keeper.pricefeed.SetPrice(
ctx, sdk.AccAddress{}, "xrp",
sdk.MustNewDecFromStr("1.00"),
sdk.NewInt(10))
keeper.pricefeed.SetCurrentPrices(ctx)
mapp.EndBlock(abci.RequestEndBlock{})
mapp.Commit()
// Create CDP
msgs := []sdk.Msg{NewMsgCreateOrModifyCDP(testAddr, "xrp", i(10), i(5))}
mock.SignCheckDeliver(t, mapp.Cdc, mapp.BaseApp, abci.Header{Height: mapp.LastBlockHeight() + 1}, msgs, []uint64{0}, []uint64{0}, true, true, testPrivKey)
mock.CheckBalance(t, mapp, testAddr, cs(c(StableDenom, 5), c("xrp", 90)))
// Modify CDP
msgs = []sdk.Msg{NewMsgCreateOrModifyCDP(testAddr, "xrp", i(40), i(5))}
mock.SignCheckDeliver(t, mapp.Cdc, mapp.BaseApp, abci.Header{Height: mapp.LastBlockHeight() + 1}, msgs, []uint64{0}, []uint64{1}, true, true, testPrivKey)
mock.CheckBalance(t, mapp, testAddr, cs(c(StableDenom, 10), c("xrp", 50)))
// Delete CDP
msgs = []sdk.Msg{NewMsgCreateOrModifyCDP(testAddr, "xrp", i(-50), i(-10))}
mock.SignCheckDeliver(t, mapp.Cdc, mapp.BaseApp, abci.Header{Height: mapp.LastBlockHeight() + 1}, msgs, []uint64{0}, []uint64{2}, true, true, testPrivKey)
mock.CheckBalance(t, mapp, testAddr, cs(c("xrp", 100)))
} | explode_data.jsonl/78112 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 723
} | [
2830,
3393,
2164,
34325,
44427,
6435,
6484,
47,
1155,
353,
8840,
836,
8,
341,
197,
322,
18626,
198,
2109,
676,
11,
53416,
1669,
18620,
11571,
2164,
26040,
84652,
741,
82281,
14603,
82,
11,
912,
5428,
11,
8358,
6095,
8850,
1669,
7860,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.